Commit 50ebf9a1 authored by Cassandra Grzonkowski

naive approach model run with unknown token

parent 05ffc96d
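
The commit title refers to a model run with an unknown token. For context, a minimal sketch of how out-of-vocabulary symbols are typically mapped to a reserved unknown id; the '<unk>' key, the dict layout of the pickled vocabulary, and the encode helper are assumptions for illustration, not code from this commit:

    import pickle

    # Sketch, not from this commit: load the pickled vocabulary (assumed to be
    # a dict mapping token -> integer id) and encode with an <unk> fallback.
    with open('/scratch/grzonkow/Model_naive_approach/vocabulary_Model_naive_approach.pkl', 'rb') as f:
        vocabulary = pickle.load(f)

    UNK_ID = vocabulary['<unk>']  # assumed reserved entry for unseen symbols

    def encode(tokens):
        """Map tokens to ids, falling back to the unknown id for OOV symbols."""
        return [vocabulary.get(token, UNK_ID) for token in tokens]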
@@ -444,19 +444,20 @@ def custom_collate_fn(batch, max_len_specs, vocabulary, folder, specs_folder, de
 def setup_parser():
     out = argparse.ArgumentParser()
     # # cluster version
-    folder_given = "naive_approach"
-    out.add_argument('--get_vocabulary', default=f'/scratch/grzonkow/vocabulary.pkl', type=str, help="Path to load vocabulary")
-    out.add_argument('--save_vocabulary', default=f'/scratch/grzonkow/{folder_given}/vocabulary.pkl',
+    folder_given = "Model_naive_approach"
+    out.add_argument('--get_vocabulary', default=f'/scratch/grzonkow/{folder_given}/vocabulary_{folder_given}.pkl', type=str, help="Path to load vocabulary")
+    out.add_argument('--save_vocabulary', default=f'/scratch/grzonkow/{folder_given}/vocabulary_new_{folder_given}.pkl',
                      type=str, help="Path to save vocabulary")
     # if built_dataset is given, dataset does not matter
     out.add_argument('--built_dataset', default=f'/scratch/grzonkow/output/', type=str, help="Path to dataset folder")
+    out.add_argument('--built_dataset_charts', default=f'/scratch/grzonkow/{folder_given}/output/', type=str, help="Path to dataset folder for charts")
     out.add_argument('--dataset', default=None, type=str, help="Path to dataset")
     out.add_argument('--save_dataset', default=f'/scratch/grzonkow/{folder_given}/train_dataset_{folder_given}.pkl',
                      type=str, help="Path to save dataset")
     out.add_argument('--old_version', default=False, type=str,
                      help="Indicates if the old version without start/end tokens needs to be loaded")
-    out.add_argument('--old_version_voc', default=True, type=str,
+    out.add_argument('--old_version_voc', default=False, type=str,
                      help="Indicates if the old vocabulary without start/end tokens needs to be loaded")
     out.add_argument('--folder', default=r'/work/MLShare/StepMania/data/cleaned/allowed_meter_difference_of_2/',
...
@@ -693,7 +694,7 @@ if __name__ == '__main__':
     f'{args.built_dataset}/all_waves/',
     f'{args.built_dataset}/all_sample_rates/',
     f'{args.built_dataset}/all_difficulties/',
-    f'{args.built_dataset}/all_charts/',
+    f'{args.built_dataset_charts}/all_charts/',
     f"{args.built_dataset}/mel_specs_{win_length_list_ms[0]}/",
     f"{args.built_dataset}/mel_specs_{win_length_list_ms[1]}/",
     f"{args.built_dataset}/mel_specs_{win_length_list_ms[2]}/",
...
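A side note on the parser above: --old_version and --old_version_voc are declared with type=str but a boolean default, so any value supplied on the command line, including the literal string "False", is truthy when tested with if args.old_version:. A stricter converter is a common fix; this is a sketch only (str2bool is not part of the commit):

    import argparse

    def str2bool(value):
        """Convert textual booleans; plain type=str would make 'False' truthy."""
        if isinstance(value, bool):
            return value
        if value.lower() in ('true', '1', 'yes'):
            return True
        if value.lower() in ('false', '0', 'no'):
            return False
        raise argparse.ArgumentTypeError(f"expected a boolean, got {value!r}")

    out = argparse.ArgumentParser()
    out.add_argument('--old_version_voc', default=False, type=str2bool,
                     help="Load the old vocabulary without start/end tokens")

The SLURM submission script is updated to match the renamed folder and the new parser defaults: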
@@ -1,6 +1,6 @@
 #!/bin/bash
 #SBATCH -t 7-00:00:00 # time limit set to 1 week (1 day would be 1-00:00:00)
-#SBATCH -J Naive_model # the job name
+#SBATCH -J Model_naive_approach # the job name
 #SBATCH --mail-type=END,FAIL,TIME_LIMIT # send notification emails
 #SBATCH -n 5 # use 5 tasks
 #SBATCH --cpus-per-task=1 # use 1 thread per task
@@ -9,8 +9,8 @@
 #SBATCH --partition=informatik-mind
 #SBATCH --gpus=1 # request 1 GPU
 #SBATCH --gpu_cmode=shared # set the GPU into shared mode, so that multiple processes can run on it
-#SBATCH --output=/scratch/grzonkow/naive_approach/model.txt # capture output
-#SBATCH --error=/scratch/grzonkow/naive_approach/err.txt # and error streams
+#SBATCH --output=/scratch/grzonkow/Model_naive_approach/model.txt # capture output
+#SBATCH --error=/scratch/grzonkow/Model_naive_approach/err.txt # and error streams
@@ -22,7 +22,8 @@ unset LD_LIBRARY_PATH
 #pip install -r requirements.txt
 #pip install simfile
 #python -u main.py --get_vocabulary /scratch/grzonkow/vocabulary.pkl --dataset /scratch/grzonkow/train_dataset_latest.pkl --processes $SLURM_NTASKS --threads $SLURM_CPUS_PER_TASK "$@"
-python -u main.py --get_vocabulary /scratch/grzonkow/vocabulary.pkl --built_dataset /scratch/grzonkow/output --processes $SLURM_NTASKS --threads $SLURM_CPUS_PER_TASK "$@"
+#python -u main.py --get_vocabulary /scratch/grzonkow/vocabulary.pkl --built_dataset /scratch/grzonkow/output --processes $SLURM_NTASKS --threads $SLURM_CPUS_PER_TASK "$@"
+python -u main.py --processes $SLURM_NTASKS --threads $SLURM_CPUS_PER_TASK "$@"
 #python -u main.py --get_vocabulary /scratch/grzonkow/vocabulary.pkl --processes $SLURM_NTASKS --threads $SLURM_CPUS_PER_TASK "$@"
 #python -u main.py --processes $SLURM_NTASKS --threads $SLURM_CPUS_PER_TASK "$@"
 conda deactivate
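
Because the script forwards "$@", any extra arguments passed to sbatch after the script name go straight through to main.py, so individual parser defaults (for example --save_dataset) can be overridden per submission without editing the script.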