#!/usr/bin/env bash
## Needs to be run by hand since I don't have a nice way of waiting on a parallel-sql job to complete.
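
# Build a one-off Slurm job script: activate the Python virtualenv one
# directory up, then run the part-1 extraction.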
echo "#!/usr/bin/env bash" > job_script.sh
echo "source $(pwd)/../bin/activate" >> job_script.sh
echo "python3 $(pwd)/comments_2_parquet_part1.py" >> job_script.sh
chmod +x job_script.sh
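
# Run part 1 through Slurm. srun blocks until the job script exits, so the
# Spark step below only starts once part 1 has written its output.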
srun -p comdata -A comdata --nodes=1 --mem=120G --time=48:00:00 --pty ./job_script.sh
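
# Part 2 finishes the parquet conversion under Spark; the leading "1" is
# presumably the number of nodes start_spark_and_run.sh should request.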
start_spark_and_run.sh 1 $(pwd)/comments_2_parquet_part2.py