preprocessor.json (1,437 bytes, commit 239441b):

{
    "module": "keras_hub.src.models.mixtral.mixtral_causal_lm_preprocessor",
    "class_name": "MixtralCausalLMPreprocessor",
    "config": {
        "name": "mixtral_causal_lm_preprocessor_2",
        "trainable": true,
        "dtype": {
            "module": "keras",
            "class_name": "DTypePolicy",
            "config": {
                "name": "float32"
            },
            "registered_name": null
        },
        "tokenizer": {
            "module": "keras_hub.src.models.mixtral.mixtral_tokenizer",
            "class_name": "MixtralTokenizer",
            "config": {
                "name": "mixtral_tokenizer",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "int32"
                    },
                    "registered_name": null
                },
                "config_file": "tokenizer.json",
                "proto": null,
                "sequence_length": null,
                "add_bos": false,
                "add_eos": false
            },
            "registered_name": "keras_hub>MixtralTokenizer"
        },
        "config_file": "preprocessor.json",
        "sequence_length": 1024,
        "add_start_token": true,
        "add_end_token": true
    },
    "registered_name": "keras_hub>MixtralCausalLMPreprocessor"
}
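
For context, this file is the serialized Keras config for a MixtralCausalLMPreprocessor: it pairs a MixtralTokenizer (loaded from the sibling tokenizer.json) with a sequence_length of 1024 and has both add_start_token and add_end_token enabled. Below is a minimal sketch of loading and using such a preprocessor via the standard keras_hub from_preset API; the preset name "mixtral_8_7b_en" is an assumption for illustration, so substitute whichever preset or local directory actually contains these files.

```python
import keras_hub

# Minimal sketch: load the preprocessor described by the config above.
# NOTE: the preset name below is an assumed placeholder, not taken from
# the config file itself.
preprocessor = keras_hub.models.MixtralCausalLMPreprocessor.from_preset(
    "mixtral_8_7b_en",
    sequence_length=1024,  # matches "sequence_length" in the config
)

# Because add_start_token and add_end_token are both true in the config,
# each sample is wrapped with the tokenizer's start and end tokens before
# being padded or truncated to sequence_length. For causal LM training the
# preprocessor returns (features, labels, sample_weights), where features
# hold "token_ids" and "padding_mask".
x, y, sample_weight = preprocessor(["The quick brown fox jumped."])
```

Note that "tokenizer.json" and "preprocessor.json" in the config are the asset file names KerasHub reads from the preset directory, so loading this config standalone requires those files to be present alongside it.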
