rag-agent-soo/app/rag_chain.py

# rag_chain.py
import os

import weaviate
from weaviate import WeaviateClient
from weaviate.auth import AuthApiKey
from weaviate.connect import ConnectionParams

from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Weaviate


def build_rag_chain():
    # 1. Weaviate clients (three variants are created while the setup is being tested)
    # NOTE: credentials are hard-coded here; in practice they should come from the environment.
    auth = AuthApiKey(api_key="01js3q6y7twaxccm5dbh3se9bt")
    OPENAI_API_KEY = os.getenv(
        "OPENAI_API_KEY",
        "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",
    )
    # Alternative for Weaviate Cloud clusters:
    # weaviate.connect_to_weaviate_cloud(
    #     cluster_url="http://183.111.96.67:32668",
    #     auth_credentials=Auth.api_key("01js3q6y7twaxccm5dbh3se9bt"),  # Auth from weaviate.classes.init
    #     headers={"X-OpenAI-Api-Key": OPENAI_API_KEY},  # only needed for OpenAI modules
    # )

    # v3-style client; the LangChain Weaviate vectorstore below expects this interface.
    client3 = weaviate.Client(
        url="http://183.111.96.67:32668",
        auth_client_secret=auth,
    )
    # v4-style client built from explicit connection parameters.
    http_config = {
        "host": "183.111.96.67",
        "port": 32668,
        "secure": False,
    }
    grpc_config = {
        "host": "183.111.96.67",
        "port": 32619,
        "secure": False,
    }
    connection_params = ConnectionParams(
        http=http_config,
        grpc=grpc_config,
    )
    # In the v4 API the credentials belong on the client, not on ConnectionParams,
    # and a directly constructed WeaviateClient must be connected explicitly.
    client = WeaviateClient(
        connection_params=connection_params,
        auth_client_secret=auth,
    )
    client.connect()
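    # Equivalent construction with the documented helper, kept as a sketch so a
    # second identical connection is not opened (assumption: weaviate-client v4):
    # connection_params = ConnectionParams.from_params(
    #     http_host="183.111.96.67", http_port=32668, http_secure=False,
    #     grpc_host="183.111.96.67", grpc_port=32619, grpc_secure=False,
    # )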
    # v4-style client built with the connect_to_custom helper (returns a connected client).
    client2 = weaviate.connect_to_custom(
        http_host="183.111.96.67",
        http_port=32668,
        http_secure=False,
        grpc_host="183.111.96.67",
        grpc_port=32619,
        grpc_secure=False,
        auth_credentials=auth,  # can be omitted if the server does not require auth
        headers={"X-OpenAI-Api-Key": OPENAI_API_KEY},  # only needed for OpenAI modules
    )
    if client.is_ready():
        print("Weaviate connection succeeded!")
    else:
        print("Connection failed. Check the server status.")
    if client2.is_ready():
        print("Weaviate 2 connection succeeded!")
    else:
        print("Connection 2 failed. Check the server status.")
    if client3.is_ready():
        print("Weaviate 3 connection succeeded!")
    else:
        print("Connection 3 failed. Check the server status.")
    # 2. Vector store (the LangChain Weaviate wrapper works with the v3-style client)
    vectorstore = Weaviate(
        client=client3,
        index_name="LangDocs",
        text_key="text",
        embedding=OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY),
    )
    # 3. LLM (OpenAI chat model)
    llm = ChatOpenAI(temperature=0, openai_api_key=OPENAI_API_KEY)
    retriever = vectorstore.as_retriever()
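    # Optional: the retriever's top-k can be tuned, e.g.
    # retriever = vectorstore.as_retriever(search_kwargs={"k": 4})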
    # 4. Build the RetrievalQA chain
    qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)

    # The v4 clients were only needed for the connectivity checks above;
    # client3 must stay open because the retriever uses it.
    client.close()
    client2.close()
    return qa_chain
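

# Minimal usage sketch (assumptions: the module is run directly, the Weaviate
# server above is reachable, and the question string is only an example).
if __name__ == "__main__":
    chain = build_rag_chain()
    result = chain({"query": "What documents are stored in the LangDocs index?"})
    print(result["result"])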