#!/usr/bin/env bash
+## this should be run manually since we don't have a nice way to wait on parallel_sql jobs
-echo "!#/usr/bin/bash" > job_script.sh
-echo "source $(pwd)/../bin/activate" >> job_script.sh
-echo "python3 $(pwd)/submissions_2_parquet_part1.py" >> job_script.sh
-srun -p comdata -A comdata --nodes=1 --mem=120G --time=48:00:00 job_script.sh
+./parse_submissions.sh
start_spark_and_run.sh 1 $(pwd)/submissions_2_parquet_part2.py
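
One possible workaround for the "run manually" limitation noted in the comment above: if the parallel_sql jobs run under SLURM and share a known job name, a polling loop can stand in for a proper wait. A minimal sketch, assuming squeue is available and using "parse_submissions" as a hypothetical job name (not a name taken from this repo):

JOB_NAME="parse_submissions"  # hypothetical; whatever name the parallel_sql jobs carry
while [ "$(squeue -h -u "$USER" -n "$JOB_NAME" | wc -l)" -gt 0 ]; do
    sleep 60  # poll once a minute until no matching jobs are queued or running
done
start_spark_and_run.sh 1 "$(pwd)/submissions_2_parquet_part2.py"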