-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy path11-chat-memory.py
More file actions
111 lines (89 loc) · 3.37 KB
/
11-chat-memory.py
File metadata and controls
111 lines (89 loc) · 3.37 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())
import json
import copy
from langchain.chains import ConversationChain
from langchain.prompts import (
PromptTemplate,
SemanticSimilarityExampleSelector
)
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS
def rag_prompting(chain, example_selector, query, convo_memory=None):
    """Answer `query` with `chain`, priming its memory with RAG examples.

    The chain's memory is rebuilt on every call: first the k examples most
    similar to `query` (selected by `example_selector`) are injected as fake
    prior turns, then the real conversation so far (`convo_memory`, if any)
    is replayed on top. After prediction the injected examples are stripped
    so only genuine dialogue survives into the returned memory snapshot.

    Returns a `(response, convo_memory)` pair; pass the returned memory back
    in on the next call to keep multi-turn context.
    """
    memory = chain.memory
    examples = example_selector.select_examples({"question": query})
    memory.clear()

    # Seed the prompt's chat history with the retrieved Q/A examples.
    for ex in examples:
        memory.save_context({"input": ex["question"]}, {"output": ex["answer"]})

    # Replay the actual conversation after the examples so it appears last.
    if convo_memory is not None:
        for msg in convo_memory.chat_memory.messages:
            if msg.type == 'human':
                memory.chat_memory.add_user_message(msg.content)
            elif msg.type == 'ai':
                memory.chat_memory.add_ai_message(msg.content)

    response = chain.predict(input=query)

    # Each example contributed one human + one AI message; drop them all
    # so the snapshot below holds only the real dialogue (incl. this turn).
    del memory.chat_memory.messages[:2 * len(examples)]
    return response, copy.deepcopy(memory)
# Prompt template for the MBKM help-desk persona. {chat_history} is filled
# from the chain's ConversationBufferMemory (examples + real dialogue, see
# rag_prompting) and {input} with the current user query at predict time.
template = """You are a knowledgeable customer service agent from Pusat Bantuan Merdeka Belajar Kampus Merdeka (MBKM).
Use the historical conversation below to answer various questions from users.
If you don't know the answer, just say I don't know. Don't make up an answer.
The answer given must always be in Indonesian with a friendly tone.
Current conversation:
{chat_history}
Human: {input}
AI Assistant:"""
# Enable few shot example prompting -- load context examples from file.
# Use a context manager so the file handle is closed deterministically
# (the original `json.load(open(...))` leaked it), and read as UTF-8
# explicitly since JSON is UTF-8 by spec regardless of platform default.
with open("chat_samples_nogreeting.json", "r", encoding="utf-8") as f:
    examples = json.load(f)

# LLM: streaming chat model; temperature 0.0 keeps answers deterministic,
# and the stdout callback prints tokens as they arrive.
chat_llm = ChatOpenAI(
    model="gpt-3.5-turbo",
    temperature=0.0,
    streaming=True,
    callbacks=[StreamingStdOutCallbackHandler()]
)

# Embed all examples into a FAISS index and select only the k=3 examples
# most semantically similar to each incoming query.
example_selector = SemanticSimilarityExampleSelector.from_examples(
    examples,
    OpenAIEmbeddings(model="text-embedding-ada-002"),
    FAISS,
    k=3
)
# Conversation memory backing the {chat_history} slot of the prompt;
# return_messages=True yields message objects so rag_prompting can copy
# them between memories.
memory = ConversationBufferMemory(
    memory_key="chat_history",
    ai_prefix="AI Assistant",
    return_messages=True,
)
print(memory.load_memory_variables({}))

# Chain: wire the custom prompt, streaming LLM, and memory together.
prompt = PromptTemplate.from_template(template)
chain = ConversationChain(
    prompt=prompt,
    llm=chat_llm,
    memory=memory,
    verbose=True,
)

# Drive a short multi-turn dialogue, threading the accumulated memory
# through each call so the assistant can resolve follow-up references.
convo_memory = None
for query in (
    "Halo, ini dengan Ghif",
    "Gimana caranya daftar di progam Magang dan Studi Independent Bersertifikat (MBKM)?",
    "Tadi saya tanya tentang daftar ke program apa?",
):
    print(query)
    response, convo_memory = rag_prompting(
        chain, example_selector, query, convo_memory=convo_memory
    )