```bash
# set train hyperparams
unset LD_PRELOAD
export HF_DATASETS_CACHE="/researchdisk/datasets_cache"
export USE_TORCH=0
python3 run_mlm_flax.py \
    --output_dir="./" \
    --model_name_or_path="./" \
    --config_name="./" \
    --tokenizer_name="./" \
    --dataset_filepath="/researchdisk/training_dataset_full_deduplicated" \
    --max_seq_length="512" \
    --pad_to_max_length \
    --preprocessing_num_workers="64" \
    --per_device_train_batch_size="8" \
    --per_device_eval_batch_size="8" \
    --adam_beta1="0.9" \
    --adam_beta2="0.98" \
    --adam_epsilon="1e-6" \
    --learning_rate="2e-4" \
    --weight_decay="0.01" \
    --warmup_steps="2500" \
    --overwrite_output_dir \
    --num_train_epochs="4" \
    --save_steps="10000" \
    --eval_steps="10000" \
    --logging_steps="500" \
    --dtype="bfloat16" \
    --adafactor \
    --push_to_hub \
    --hub_model_id="Finnish-NLP/roberta-large-wechsel-finnish"
```
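With `--push_to_hub`, the final checkpoint is uploaded to the Hub under the given `--hub_model_id`. Below is a minimal usage sketch for loading the pushed model back for masked-token prediction; it assumes the repo contains the Flax weights produced by `run_mlm_flax.py`, and the example sentence is purely illustrative:

```python
# Minimal usage sketch; assumes Finnish-NLP/roberta-large-wechsel-finnish
# holds the Flax checkpoint pushed by the training run above.
import numpy as np
from transformers import AutoTokenizer, FlaxRobertaForMaskedLM

repo_id = "Finnish-NLP/roberta-large-wechsel-finnish"
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = FlaxRobertaForMaskedLM.from_pretrained(repo_id)

# Encode a sentence with one masked token (example input is hypothetical)
inputs = tokenizer("Helsinki on Suomen <mask>.", return_tensors="np")
logits = model(**inputs).logits

# Take the highest-scoring vocabulary item at the mask position
mask_pos = int(np.argwhere(inputs["input_ids"][0] == tokenizer.mask_token_id)[0, 0])
predicted_id = int(logits[0, mask_pos].argmax())
print(tokenizer.decode([predicted_id]))
```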