Update server.py
parent e80f4d0e1e
commit db130ab6c0
@@ -1,63 +1,35 @@
#!/usr/bin/env python
"""Example LangChain server exposes multiple runnables (LLMs in this case)."""

from pydantic import BaseModel, Field
from fastapi import FastAPI
from fastapi.responses import RedirectResponse
from langchain_anthropic import ChatAnthropic
from langchain_openai import ChatOpenAI
from langchain_google_genai import ChatGoogleGenerativeAI

from langserve import add_routes
from chain import build_chain

import mlflow

# Input API
class QueryInput(BaseModel):
    """Query input model."""

    question: str = Field(..., description="User question")
    temperature: float = 0.7
    max_tokens: int = 1000

import getpass
import os

os.environ["OPENAI_API_KEY"] = "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"
os.environ["GOOGLE_API_KEY"] = "AIzaSyDIO5PFKRV-kVjNzpsrOcC-KodUMO-VGAo"

rag_chain = build_chain()

# Log LangChain traces to the local MLflow tracking server
mlflow.set_tracking_uri("http://localhost:5000")
mlflow.langchain.autolog()

app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple API server using LangChain's Runnable interfaces",
)

llm = ChatGoogleGenerativeAI(
    model="gemini-2.0-flash-001",
    temperature=0,
    max_tokens=None,
    timeout=None,
    max_retries=2,
    # other params...
)

openai_llm = ChatOpenAI(
    model="gpt-4o",
    temperature=0,
    max_tokens=None,
    timeout=None,
    max_retries=2,
    # api_key="...",  # if you prefer to pass the API key in directly instead of using env vars
    # base_url="...",
    # organization="...",
    # other params...
)

add_routes(
    app,
    llm,
    path="/gemeni",
)

@app.get("/")
async def redirect_root_to_docs():
    return RedirectResponse("/chat")


# Edit this to add the chain you want to add
add_routes(
    app,
    rag_chain,
    path="/chat",
    input_type=QueryInput,
)

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)
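
For reference, add_routes serves the chain at the standard LangServe endpoints under the /chat prefix (/chat/invoke, /chat/batch, /chat/stream, /chat/playground). A minimal client sketch, assuming the server is running locally on port 8000 and only the required question field is sent (the question text below is just a placeholder):

import requests

# LangServe's invoke endpoint expects the chain input wrapped under an "input" key.
response = requests.post(
    "http://localhost:8000/chat/invoke",
    json={"input": {"question": "What does this service do?"}},
)
print(response.json())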