# Model metadata: the model is served with the Python 3 framework.
model_meta {
    framework_type: PYTHON3
}

# Runtime configuration for request batching.
runtime_conf {
    enable_batching: true              # turn on server-side batching
    batching_type: "matx_inference"    # batching backend to use
    max_batching_time_ms: 2000         # wait up to 2000 ms to form a batch
    allowed_batch_sizes: [1]           # only batches of size 1 are allowed
}