{
  "module": "keras_hub.src.models.mixtral.mixtral_causal_lm_preprocessor",
  "class_name": "MixtralCausalLMPreprocessor",
  "config": {
    "name": "mixtral_causal_lm_preprocessor_2",
    "trainable": true,
    "dtype": {
      "module": "keras",
      "class_name": "DTypePolicy",
      "config": {
        "name": "float32"
      },
      "registered_name": null
    },
    "tokenizer": {
      "module": "keras_hub.src.models.mixtral.mixtral_tokenizer",
      "class_name": "MixtralTokenizer",
      "config": {
        "name": "mixtral_tokenizer",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "int32"
          },
          "registered_name": null
        },
        "config_file": "tokenizer.json",
        "proto": null,
        "sequence_length": null,
        "add_bos": false,
        "add_eos": false
      },
      "registered_name": "keras_hub>MixtralTokenizer"
    },
    "config_file": "preprocessor.json",
    "sequence_length": 1024,
    "add_start_token": true,
    "add_end_token": true
  },
  "registered_name": "keras_hub>MixtralCausalLMPreprocessor"
}