lpu.py
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import uvicorn
import os

# LangChain imports; adjust these if your installed langchain version organizes them differently
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_groq import ChatGroq

class UserRequest(BaseModel):
    """Request body for the /route/ endpoint."""
    query: str
    content: str

app = FastAPI()


@app.get("/")
async def root():
    return {"message": "plswork!"}
@app.post("/route/")
async def process_request(user_request: UserRequest):
llm = ChatGroq(groq_api_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", model_name='mixtral-8x7b-32768')
query = user_request.query
content = user_request.content
prompt_template = """ You are a professional recruiter who specializes in cultivating talent, you are very knowledgable about all types of jobs. Answer the question
based on the context below.
Context: {content}
Question: {query}
"""
# Define the prompt structure
prompt = PromptTemplate(
input_variables=["content", "query"],
template=prompt_template,
)
llm_chain = LLMChain(llm=llm, prompt=prompt)
# Pass the context and question to the Langchain chain
result_chain = llm_chain.invoke({"content": content, "query": query})
return result_chain

if __name__ == "__main__":
    uvicorn.run(app)
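
# Example client call (a minimal sketch, not part of the original file): it assumes
# the server is running locally on uvicorn's default port 8000 and that GROQ_API_KEY
# is set in the environment. The "query"/"content" keys match the UserRequest model.
#
#   import requests
#
#   payload = {
#       "query": "What skills does this role require?",
#       "content": "Job posting text goes here...",
#   }
#   response = requests.post("http://127.0.0.1:8000/route/", json=payload)
#   print(response.json())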