-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathapp.py
More file actions
33 lines (28 loc) · 1022 Bytes
/
app.py
File metadata and controls
33 lines (28 loc) · 1022 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
from langchain import PromptTemplate, LLMChain
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
import chainlit as cl
from utils import get_docsearch
# Prompt template for chain-of-thought style answering.
# NOTE(review): defined here but not referenced anywhere in the visible
# code (factory() below builds its chain without it) — presumably intended
# for a PromptTemplate/LLMChain; confirm before removing.
template = """Input: {question}
Output: Let's think step by step."""
@cl.langchain_factory(use_async=True)
async def factory():
    """Build and return a RetrievalQA chain for a user-uploaded document.

    Prompts the user (via Chainlit) for a text or PDF file, indexes it with
    the project's `get_docsearch` helper, and wires the resulting retriever
    into a streaming ChatOpenAI RetrievalQA chain.
    """
    # Keep asking until the user actually uploads something; AskFileMessage
    # returns None when no file was provided before the timeout.
    uploads = None
    while uploads is None:
        uploads = await cl.AskFileMessage(
            content="Select File",
            accept=["text/plain", "application/pdf"],
            max_size_mb=20,
            timeout=180,
        ).send()

    chosen = uploads[0]

    # Let the user know indexing is in progress before the (slow) embed step.
    status = cl.Message(content=f"Processing `{chosen.name}`...")
    await status.send()

    # get_docsearch is synchronous; run it off the event loop so the UI
    # stays responsive while the document is embedded.
    index = await cl.make_async(get_docsearch)(chosen)

    qa_chain = RetrievalQA.from_chain_type(
        ChatOpenAI(temperature=0, streaming=True),
        chain_type="stuff",
        retriever=index.as_retriever(max_tokens_limit=4097),
    )

    await status.update(content=f"`{chosen.name}` processed. You can now ask questions!")
    return qa_chain