start_spark_worker.sh
#!/usr/bin/env bash
# runs on worker node
# instance_name=spark-worker-$(hostname)
# echo $hostname
# instance_url="instance://$instance_name"
# singularity instance list
# singularity instance stop -F "$instance_name"
# singularity instance list
# sleep 5
# ls $HOME/.singularity/instances/sing/$(hostname)/nathante/$instance_name
# rm -r $HOME/.singularity/instances/sing/$(hostname)/nathante/$instance_name
# singularity instance start /gscratch/comdata/users/nathante/cdsc_base.sif $instance_name
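# (The commented-out block above appears to be an earlier approach that ran
# the worker inside a Singularity container instance; kept for reference.)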
source /gscratch/comdata/env/cdsc_klone_bashrc
source "$SPARK_CONF_DIR/spark-env.sh"
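# spark-env.sh is expected to define PYSPARK_PYTHON and SPARK_MASTER_PORT,
# both of which are used below.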
echo "$(which python3)"
echo "$PYSPARK_PYTHON"
echo "start-worker.sh spark://$1:$SPARK_MASTER_PORT"
start-worker.sh "spark://$1:$SPARK_MASTER_PORT"
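# Usage sketch (not from the original script): the Spark master's hostname is
# passed as the first argument, so from a SLURM job script one might launch a
# worker on each allocated node with something like:
#
#   srun --nodes=1 --ntasks=1 bash start_spark_worker.sh "$MASTER_HOST"
#
# where MASTER_HOST is a hypothetical variable holding the hostname of the
# node that ran Spark's start-master.sh.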
