{ "_attn_implementation_autoset": true, "act_expert_config": { "_attn_implementation_autoset": true, "attention_bias": false, "attention_dropout": 0.0, "head_dim": 256, "hidden_act": "gelu_pytorch_tanh", "hidden_activation": "gelu_pytorch_tanh", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 4096, "max_position_embeddings": 8192, "model_type": "gemma", "num_attention_heads": 8, "num_hidden_layers": 18, "num_key_value_heads": 1, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "torch_dtype": "float32", "use_cache": true, "vocab_size": 257152 }, "architectures": [ "MWMForConditionalGeneration" ], "attention_implementation": "eager", "auto_map": { "AutoConfig": "configuration_mwm.MWMConfig", "AutoModel": "modeling_mwm.MWMForConditionalGeneration" }, "chunk_size": 30, "gen_expert_config": { "_attn_implementation_autoset": true, "attention_bias": false, "attention_dropout": 0.0, "head_dim": 256, "hidden_act": "gelu_pytorch_tanh", "hidden_activation": "gelu_pytorch_tanh", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 4096, "max_position_embeddings": 8192, "model_type": "gemma", "num_attention_heads": 8, "num_hidden_layers": 18, "num_key_value_heads": 1, "num_resolutions": 4, "pn": "1_2_3_4_5_6_8_10_13_16", "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "temporal_conv_kernel_size": 4, "temporal_conv_stride": 4, "torch_dtype": "float32", "use_cache": true, "vae": { "ch": 160, "share_quant_resi": 4, "test_mode": true, "vae_ckpt": "/fs-computility/efm/shared/model_weights/var/vae_ch160v4096z32.pth", "vocab_size": 4096, "z_channels": 32 }, "vocab_size": 257152 }, "language_tokenizer_path": "/fs-computility/efm/shared/model_weights/paligemma-3b-pt-224", "max_action_dim": 32, "max_state_dim": 32, "model_type": "mwm", "num_steps": 10, "pretrained_path": "/fs-computility/efm/shared/model_weights/pi0/lerobot", "proj_width": 1024, "resize_imgs_with_padding": [ 224, 224 ], "tokenizer_max_length": 48, "transformers_version": "4.51.3", "und_expert_config": { "_attn_implementation_autoset": true, "_vocab_size": 257152, "bos_token_id": 2, "eos_token_id": 1, "hidden_size": 2048, "image_token_index": 257152, "model_type": "paligemma", "pad_token_id": 0, "projection_dim": 2048, "text_config": { "_attn_implementation_autoset": false, "_name_or_path": "", "add_cross_attention": false, "architectures": null, "attention_bias": false, "attention_dropout": 0.0, "bad_words_ids": null, "begin_suppress_tokens": null, "bos_token_id": 2, "chunk_size_feed_forward": 0, "cross_attention_hidden_size": null, "decoder_start_token_id": null, "diversity_penalty": 0.0, "do_sample": false, "early_stopping": false, "encoder_no_repeat_ngram_size": 0, "eos_token_id": 1, "exponential_decay_length_penalty": null, "finetuning_task": null, "forced_bos_token_id": null, "forced_eos_token_id": null, "head_dim": 256, "hidden_act": "gelu_pytorch_tanh", "hidden_activation": "gelu_pytorch_tanh", "hidden_size": 2048, "id2label": { "0": "LABEL_0", "1": "LABEL_1" }, "initializer_range": 0.02, "intermediate_size": 16384, "is_decoder": false, "is_encoder_decoder": false, "label2id": { "LABEL_0": 0, "LABEL_1": 1 }, "length_penalty": 1.0, "max_length": 20, "max_position_embeddings": 8192, "min_length": 0, "model_type": "gemma", "no_repeat_ngram_size": 0, "num_attention_heads": 8, "num_beam_groups": 1, "num_beams": 1, "num_hidden_layers": 18, "num_image_tokens": 256, "num_key_value_heads": 1, "num_return_sequences": 1, "output_attentions": false, "output_hidden_states": false, "output_scores": false, "pad_token_id": 
0, "prefix": null, "problem_type": null, "pruned_heads": {}, "remove_invalid_values": false, "repetition_penalty": 1.0, "return_dict": true, "return_dict_in_generate": false, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sep_token_id": null, "suppress_tokens": null, "task_specific_params": null, "temperature": 1.0, "tf_legacy_loss": false, "tie_encoder_decoder": false, "tie_word_embeddings": true, "tokenizer_class": null, "top_k": 50, "top_p": 1.0, "torch_dtype": "float32", "torchscript": false, "typical_p": 1.0, "use_bfloat16": false, "use_cache": true, "vocab_size": 257152 }, "vision_config": { "_attn_implementation_autoset": false, "_name_or_path": "", "add_cross_attention": false, "architectures": null, "attention_dropout": 0.0, "bad_words_ids": null, "begin_suppress_tokens": null, "bos_token_id": null, "chunk_size_feed_forward": 0, "cross_attention_hidden_size": null, "decoder_start_token_id": null, "diversity_penalty": 0.0, "do_sample": false, "early_stopping": false, "encoder_no_repeat_ngram_size": 0, "eos_token_id": null, "exponential_decay_length_penalty": null, "finetuning_task": null, "forced_bos_token_id": null, "forced_eos_token_id": null, "hidden_act": "gelu_pytorch_tanh", "hidden_size": 1152, "id2label": { "0": "LABEL_0", "1": "LABEL_1" }, "image_size": 224, "intermediate_size": 4304, "is_decoder": false, "is_encoder_decoder": false, "label2id": { "LABEL_0": 0, "LABEL_1": 1 }, "layer_norm_eps": 1e-06, "length_penalty": 1.0, "max_length": 20, "min_length": 0, "model_type": "siglip_vision_model", "no_repeat_ngram_size": 0, "num_attention_heads": 16, "num_beam_groups": 1, "num_beams": 1, "num_channels": 3, "num_hidden_layers": 27, "num_image_tokens": 256, "num_return_sequences": 1, "output_attentions": false, "output_hidden_states": false, "output_scores": false, "pad_token_id": null, "patch_size": 14, "prefix": null, "problem_type": null, "projection_dim": 2048, "projector_hidden_act": "gelu_fast", "pruned_heads": {}, "remove_invalid_values": false, "repetition_penalty": 1.0, "return_dict": true, "return_dict_in_generate": false, "sep_token_id": null, "suppress_tokens": null, "task_specific_params": null, "temperature": 1.0, "tf_legacy_loss": false, "tie_encoder_decoder": false, "tie_word_embeddings": true, "tokenizer_class": null, "top_k": 50, "top_p": 1.0, "torch_dtype": "float32", "torchscript": false, "typical_p": 1.0, "use_bfloat16": false, "vision_use_head": false } }, "use_cache": true, "use_world_model": true }