diff --git a/1/model.py b/1/model.py
index 6d615fd..e1b4860 100644
--- a/1/model.py
+++ b/1/model.py
@@ -1,5 +1,6 @@
 import triton_python_backend_utils as pb_utils
-from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
+import random
+import string
 import numpy as np
 import json
 
@@ -30,12 +31,13 @@ class TritonPythonModel:
         input_text = self._get_input_value(request, "INPUT")
         self.logger.log_info(f"INPUT 출력:\n{input_text}")
-
-        output = "HELLO_WORLD"
+        random_string = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(16))
+
+        output = random_string
 
         self.logger.log_info(f"OUTPUT 출력:\n{output}")
 
         # 생성된 텍스트를 Triton 출력 텐서로 변환합니다.
-        output_tensor = pb_utils.Tensor("text_output", np.array(output.encode('utf-8'), dtype=np.bytes_))
+        output_tensor = pb_utils.Tensor("OUTPUT", np.array(output.encode('utf-8'), dtype=np.bytes_))
 
         # 응답 객체를 생성하고 출력 텐서를 추가합니다.
         responses.append(pb_utils.InferenceResponse(output_tensors=[output_tensor]))