Update rag_chain.py

localsoo 2025-04-21 01:40:31 +00:00
parent fd9a212e8e
commit 52184b1a93

@@ -24,6 +24,7 @@ def build_rag_chain():
     #         "X-OpenAI-Api-Key": "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"  # if needed
     #     }
     # )
+    OPENAI_API_KEY = "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"
     client = weaviate.connect_to_custom(
         http_host="183.111.96.67",
@@ -33,7 +34,7 @@ def build_rag_chain():
         grpc_port=32619,
         grpc_secure=False,
         auth_credentials=AuthApiKey("01js3q6y7twaxccm5dbh3se9bt"),  # can be omitted if authentication is not required
-        headers={"X-OpenAI-Api-Key": "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"}  # if needed
+        headers={"X-OpenAI-Api-Key": OPENAI_API_KEY}  # if needed
     )
     if client.is_ready():
@@ -47,11 +48,11 @@ def build_rag_chain():
         client=client,
         index_name="LangDocs",
         text_key="text",
-        embedding=OpenAIEmbeddings()
+        embedding=OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
     )
     # 3. HuggingFace LLM (e.g. mistralai/Mistral-7B-Instruct-v0.2)
-    llm = ChatOpenAI(temperature=0)
+    llm = ChatOpenAI(temperature=0, openai_api_key=OPENAI_API_KEY)
     retriever = vectorstore.as_retriever()
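
The change moves the inline OpenAI key string into an OPENAI_API_KEY variable and passes it explicitly to the Weaviate request headers, OpenAIEmbeddings, and ChatOpenAI. Below is a minimal sketch of what build_rag_chain() looks like after this commit, with the key read from an environment variable instead of being hardcoded. The os.environ lookup, the http_port/http_secure/grpc_host values, the langchain_weaviate WeaviateVectorStore import, and the return value are assumptions: those lines fall outside the hunks shown in the diff.

import os

import weaviate
from weaviate.auth import AuthApiKey
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain_weaviate.vectorstores import WeaviateVectorStore  # assumed wrapper for the v4 client


def build_rag_chain():
    # Read the key from the environment instead of committing it to the repo (assumption).
    OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]

    # Connect to the self-hosted Weaviate instance; hosts/ports taken from the diff,
    # except http_port/http_secure/grpc_host, which are placeholders here.
    client = weaviate.connect_to_custom(
        http_host="183.111.96.67",
        http_port=8080,            # placeholder; the real port is outside the shown hunks
        http_secure=False,         # placeholder
        grpc_host="183.111.96.67", # placeholder
        grpc_port=32619,
        grpc_secure=False,
        auth_credentials=AuthApiKey("01js3q6y7twaxccm5dbh3se9bt"),  # can be omitted if authentication is not required
        headers={"X-OpenAI-Api-Key": OPENAI_API_KEY},  # if needed
    )
    if not client.is_ready():
        raise RuntimeError("Weaviate is not ready")

    # Wrap the existing LangDocs collection as a LangChain vector store,
    # embedding queries with the same key.
    vectorstore = WeaviateVectorStore(
        client=client,
        index_name="LangDocs",
        text_key="text",
        embedding=OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY),
    )

    # LLM and retriever, as in the diff.
    llm = ChatOpenAI(temperature=0, openai_api_key=OPENAI_API_KEY)
    retriever = vectorstore.as_retriever()

    # What the function returns is not shown in the diff; returning both pieces
    # here purely as an illustration.
    return llm, retriever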