{
  "alpha_contrastive_loss": 0.5,
  "architectures": [
    "BacformerForMaskedGM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "auto_map": {
    "AutoConfig": "configuration_bacformer.BacformerConfig",
    "AutoModel": "modeling_bacformer.BacformerModel",
    "AutoModelForMaskedLM": "modeling_bacformer.BacformerForMaskedGM",
    "AutoModelForTokenClassification": "modeling_bacformer.BacformerForProteinClassification",
    "AutoModelForSequenceClassification": "modeling_bacformer.BacformerForGenomeClassification"
  },
  "batch_size": 1,
  "ckpt_path": null,
  "dataloader_num_workers": 16,
  "early_stopping_patience": 8,
  "end_token_id": 5,
  "eval_steps": 8000,
  "gradient_accumulation_steps": 8,
  "hidden_dropout_prob": 0.1,
  "hidden_size": 480,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "input_dir": "/rds/user/mw896/rds-flotolab-9X9gY1OFt4M/projects/bacformer/input-data/eval-genomes/",
  "intermediate_size": 1280,
  "is_causal_gm": false,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_eps": 1e-12,
  "logging_steps": 500,
  "lr": 0.00015,
  "mask_token_id": 1,
  "max_epochs": 10,
  "max_grad_norm": 2.0,
  "max_n_contigs": 1000,
  "max_n_proteins": 6000,
  "max_position_embeddings": 6000,
  "max_token_type_embeddings": 1000,
  "mgm_probability": 0.15,
  "model_type": "bacformer",
  "monitor_metric": "loss",
  "n_nodes": 1,
  "n_total_samples": 1203731,
  "num_attention_heads": 8,
  "num_hidden_layers": 12,
  "num_special_tokens": 6,
  "output_dir": "/rds/user/mw896/rds-flotolab-9X9gY1OFt4M/projects/bacformer/output-data/all-genomes/runs-mgm/12L-full-dataset-rotary-lr15e-5-ampere/",
  "pad_token_id": 0,
  "pretrained_model_dir": null,
  "problem_type": "single_label_classification",
  "prot_emb_token_id": 4,
  "protein_clusters_vocab_size": 50000,
  "random_state": 30,
  "return_attn_weights": false,
  "return_dict": false,
  "save_steps": 8000,
  "special_tokens_dict": {
    "CLS": 2,
    "END": 5,
    "MASK": 1,
    "PAD": 0,
    "PROT_EMB": 4,
    "SEP": 3
  },
  "test": false,
  "test_after_train": false,
  "torch_dtype": "bfloat16",
  "train_subset_prop": 1.0,
  "transformers_version": "4.50.3",
  "warmup_proportion": 0.1,
  "weight_decay": 0.01
}
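
The `auto_map` entries above register custom Bacformer classes (`configuration_bacformer.BacformerConfig`, `modeling_bacformer.BacformerForMaskedGM`, etc.) that ship alongside this config, so loading through the `transformers` Auto classes requires `trust_remote_code=True`. Below is a minimal loading sketch; the repository identifier is a placeholder (it is not stated in this file) and must be replaced with the actual model repo or local checkpoint directory.

```python
# Minimal sketch of loading this config/model with the transformers Auto classes.
# Assumption: REPO_ID is a placeholder; substitute the real Bacformer model repo
# or a local directory containing this config.json plus the custom modules.
import torch
from transformers import AutoConfig, AutoModelForMaskedLM

REPO_ID = "<org>/<bacformer-checkpoint>"  # placeholder, not taken from this file

# trust_remote_code=True lets transformers import the custom
# configuration_bacformer / modeling_bacformer modules named in auto_map.
config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=True)
print(config.hidden_size, config.num_hidden_layers)  # 480, 12 per this config

# AutoModelForMaskedLM resolves to BacformerForMaskedGM via the auto_map.
model = AutoModelForMaskedLM.from_pretrained(
    REPO_ID,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
)
model.eval()
```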