Update config.pbtxt

This commit is contained in:
cheetahadmin 2025-11-24 00:59:57 +00:00
parent 3ba54c6ce7
commit a9c3a0815c

@ -1,75 +1,105 @@
# Triton backend to use
name: "stablelm-2-1_6b"
# Triton backend for TransformerLLM.
backend: "python"
max_batch_size: 0
# Triton should expect as input a single
# string element named 'text_input'
input [
{
name: "text_input"
data_type: TYPE_STRING
dims: [ -1 ]
dims: [ 1 ]
},
{
name: "max_length"
data_type: TYPE_INT32
dims: [ 1 ]
optional: true
},
{
name: "max_new_tokens"
data_type: TYPE_INT32
dims: [ 1 ]
optional: true
},
{
name: "do_sample"
data_type: TYPE_BOOL
dims: [ 1 ]
optional: true
},
{
name: "top_k"
data_type: TYPE_INT32
dims: [ 1 ]
optional: true
},
{
name: "top_p"
data_type: TYPE_FP32
dims: [ 1 ]
optional: true
},
{
name: "temperature"
data_type: TYPE_FP32
dims: [ 1 ]
optional: true
},
{
name: "repetition_penalty"
data_type: TYPE_FP32
dims: [ 1 ]
optional: true
},
{
name: "stream"
data_type: TYPE_BOOL
dims: [ 1 ]
optional: true
}
]
# Triton should respond with a single
# string element named 'text_output'
output [
{
name: "text_output"
data_type: TYPE_STRING
dims: [ -1 ]
dims: [ 1 ]
}
]
parameters: [
@ -78,15 +108,24 @@ parameters: [
value: {string_value: "/cheetah/input/model/groupuser/stablelm-2-1_6b"}
},
{
key: "enable_inference_trace",
value: {string_value: "True"}
key: "is_adapter_model",
value: {string_value: "false"}
},
{
key: "adapter_model_path",
value: {string_value: ""}
},
{
key: "quantization",
value: {string_value: "none"}
}
]
instance_group [
{
kind: KIND_GPU,
kind: KIND_AUTO,
count: 1
}
]