Vintix / config.json
{"data_dir": "/home/jovyan/foundation_datasets_new", "context_len": 8192, "trajectory_sparsity": 257, "preload": false, "ic_treshold": 0.3, "exp_treshold": 0.75, "action_emb_dim": 511, "observation_emb_dim": 511, "reward_emb_dim": 2, "hidden_dim": 1024, "transformer_depth": 20, "transformer_heads": 16, "attn_dropout": 0.0, "residual_dropout": 0.0, "normalize_qk": true, "bias": true, "parallel_residual": false, "shared_attention_norm": false, "norm_class": "LayerNorm", "mlp_class": "GptNeoxMLP", "intermediate_size": 4096, "inner_ep_pos_enc": false, "norm_acs": false, "norm_obs": true, "optimizer": "Adam", "lr": 0.0003, "betas": [0.9, 0.99], "weight_decay": 0.1, "precision": "bf16", "clip_grad": null, "grad_accum_steps": 2, "warmup_ratio": 0.005, "local_rank": 0, "epochs": 150, "batch_size": 8, "eval_every": 5, "val_max_trans": 80000, "smooth_win": 8, "save_every": 2, "save_dir": "/home/jovyan/found_model_checkpoints/frl_d20h16h1024_final6_obsscale", "load_ckpt": null, "start_epoch": 0, "seed": 5, "dataset_config_paths": ["foundation_rl/data/configs/metaworld_config.yaml", "foundation_rl/data/configs/mujoco_config.yaml", "foundation_rl/data/configs/ib_config.yaml", "foundation_rl/data/configs/bidex_config.yaml"], "project": "foundation_rl", "group": "default", "name": "frl_d20h16h1024_final6_obsscale"}