code.communitydata.science - cdsc_reddit.git/blob - start_spark_cluster.sh
make pass keyword arg to dataframe.drop
[cdsc_reddit.git] / start_spark_cluster.sh
#!/usr/bin/env bash
# Launch a standalone Spark cluster inside a Slurm allocation:
# start the Spark master on the node this script runs on, then start a
# worker on every node of the allocation via ssh.
#
# Requires: a Slurm job environment (scontrol), Spark's start-master.sh on
# PATH, and start_spark_worker.sh available on every worker node.
set -euo pipefail

# One hostname per line/word for every node in the current Slurm job.
nodes="$(scontrol show hostnames)"

# The master runs on the current node. Assign and export separately so a
# failure of hostname is not masked by the export.
SPARK_MASTER_HOST="$(hostname)"
export SPARK_MASTER_HOST
echo "$SPARK_MASTER_HOST"

# singularity instance stop spark-boss
# rm -r $HOME/.singularity/instances/sing/$(hostname)/nathante/spark-boss

# for node in $nodes
# do
#     echo $node
#     ssh $node "singularity instance stop --all -F"
# done

# singularity instance start /gscratch/comdata/users/nathante/cdsc_base.sif spark-boss
# apptainer exec /gscratch/comdata/users/nathante/containers/nathante.sif
start-master.sh

# $nodes is intentionally unquoted: word-splitting yields one hostname per
# iteration (hostnames contain no whitespace or glob characters).
for node in $nodes
do
    # if [ "$node" != "$SPARK_BOSS" ]
    # then
    echo "$node"
    # Best-effort: warn but keep going if one worker fails to start, so a
    # single bad node does not prevent the rest of the cluster coming up.
    ssh -t "$node" start_spark_worker.sh "$SPARK_MASTER_HOST" \
        || printf 'warning: failed to start worker on %s\n' "$node" >&2
    # fi
done

Community Data Science Collective || Want to submit a patch?