
summary

master
Hendrik Langer committed 2 years ago
parent
commit 13b1863fae
  1. matrix_pygmalion_bot/bot/ai/langchain.py (6)
  2. matrix_pygmalion_bot/bot/ai/prompts.py (2)
  3. matrix_pygmalion_bot/main.py (4)

matrix_pygmalion_bot/bot/ai/langchain.py (6)

@@ -143,7 +143,7 @@ class AI(object):
         if len(conversation_memory.chat_memory.messages) > max_k*2:
             async def make_progressive_summary(previous_summary, chat_history_text_string):
-                asyncio.sleep(0) # yield for matrix-nio
+                await asyncio.sleep(0) # yield for matrix-nio
                 #self.rooms[message.room_id]["summary"] = summary_memory.predict_new_summary(conversation_memory.chat_memory.messages, previous_summary).strip()
                 summary_chain = LLMChain(llm=self.llm_summary, prompt=prompt_progressive_summary)
                 self.rooms[message.room_id]["summary"] = await summary_chain.apredict(summary=previous_summary, chat_history=chat_history_text_string)
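The fix in this hunk is the missing await: a bare asyncio.sleep(0) only creates a coroutine object that is never scheduled (Python later warns "coroutine was never awaited"), so the handler never actually yields and matrix-nio's sync loop can stall behind it. Awaiting it suspends the task for one event-loop iteration. A minimal, self-contained sketch with illustrative names (not from the repo):

    import asyncio

    async def slow_handler():
        for _ in range(3):
            # ... a slice of CPU-heavy work would go here ...
            # A bare asyncio.sleep(0) here would only create an un-awaited
            # coroutine and never give control back to the event loop.
            await asyncio.sleep(0)  # suspend once per iteration so other tasks can run

    async def heartbeat():
        for _ in range(3):
            print("event loop still responsive")
            await asyncio.sleep(0)

    async def main():
        await asyncio.gather(slow_handler(), heartbeat())

    asyncio.run(main())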
@@ -156,7 +156,7 @@ class AI(object):
                 #return summary
             logger.info("memory progressive summary scheduled...")
-            await self.bot.schedule(self.bot.queue, make_progressive_summary, self.rooms[message.room_id]["summary"], conversation_memory.buffer)
+            await self.bot.schedule(self.bot.queue, make_progressive_summary, self.rooms[message.room_id]["summary"], conversation_memory.buffer) #.add_done_callback(
@@ -191,7 +191,7 @@ class AI(object):
         #roleplay_chain = RoleplayChain(llm_chain=chain, character_name=self.bot.name, persona=self.bot.persona, scenario=self.bot.scenario, ai_name_chat=chat_ai_name, human_name_chat=chat_human_name)
-        stop = ['<|endoftext|>', f"\n{chat_human_name}:"]
+        stop = ['<|endoftext|>', f"\n{chat_human_name}"]
         #print(f"Message is: \"{message.message}\"")
         output = await chain.arun({"input":message.message, "stop": stop})
         output = output.replace("<BOT>", self.bot.name).replace("<USER>", message.user_name)
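Dropping the trailing colon from the stop string makes generation cut off at the other speaker's name whether or not the model actually emits the ":" turn marker. The effect is the same as client-side truncation along these lines (truncate_at_stop is a hypothetical helper, not from the repo):

    def truncate_at_stop(text: str, stop: list[str]) -> str:
        """Cut generated text at the earliest occurrence of any stop string."""
        cut = len(text)
        for s in stop:
            idx = text.find(s)
            if idx != -1:
                cut = min(cut, idx)
        return text[:cut]

    # "\nHendrik" now also stops on "\nHendrik smiles" or a bare "\nHendrik",
    # not only on the exact "\nHendrik:" turn marker.
    print(truncate_at_stop("Julia: Hi there!\nHendrik smiles back", ["<|endoftext|>", "\nHendrik"]))
    # -> "Julia: Hi there!"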

matrix_pygmalion_bot/bot/ai/prompts.py (2)

@@ -114,7 +114,7 @@ prompt_progressive_summary = PromptTemplate.from_template(
 """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.
 ### Instruction:
-Based on the provided summary and new lines of conversation, give a brief and refined final summary. Only include relevant facts and key takeaways. Skip mundane details of prior events in the final and refined summary.
+Based on the provided summary and new lines of conversation, give a brief and refined final summary. Include relevant facts and key takeaways. Skip mundane details of prior events in the final and refined summary.
 ### Input:
 Current summary:
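For reference, this is the template that summary_chain.apredict(summary=..., chat_history=...) fills in langchain.py above. A reduced sketch of how such a template renders, using a shortened stand-in for the real prompt text and the langchain 0.x import path the repo already uses:

    from langchain import PromptTemplate

    # Shortened stand-in; the real prompt_progressive_summary uses the full
    # Alpaca-style ### Instruction / ### Input / ### Response framing shown above.
    template = PromptTemplate.from_template(
        "Current summary:\n{summary}\n\nNew lines of conversation:\n{chat_history}\n\nRefined summary:"
    )

    print(template.format(
        summary="Julia and the bot introduced themselves.",
        chat_history="Julia: What did we talk about yesterday?\nBot: Mostly the weather.",
    ))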

matrix_pygmalion_bot/main.py (4)

@@ -64,13 +64,13 @@ async def main() -> None:
         if sys.version_info[0] == 3 and sys.version_info[1] < 11:
             tasks = []
             for bot in bots:
-                task = asyncio.create_task(bot.connection.sync_forever(timeout=30000, full_state=True))
+                task = asyncio.create_task(bot.connection.sync_forever(timeout=180000, full_state=True)) # 30000
                 tasks.append(task)
             await asyncio.gather(*tasks)
         else:
             async with asyncio.TaskGroup() as tg:
                 for bot in bots:
-                    task = tg.create_task(bot.connection.sync_forever(timeout=30000, full_state=True))
+                    task = tg.create_task(bot.connection.sync_forever(timeout=180000, full_state=True)) # 30000
     except Exception:
         print(traceback.format_exc())
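The only change here raises matrix-nio's /sync long-poll timeout from 30000 ms to 180000 ms, i.e. each sync request may stay open for up to three minutes before the homeserver returns. A minimal sketch of the Python 3.11+ branch, assuming each bot's connection is a nio.AsyncClient that is already logged in (run_sync_loops is an illustrative name, not from the repo):

    import asyncio
    from nio import AsyncClient

    async def run_sync_loops(clients: list[AsyncClient]) -> None:
        # Each client long-polls /sync; timeout is given in milliseconds,
        # so 180000 holds a request open for up to three minutes.
        async with asyncio.TaskGroup() as tg:
            for client in clients:
                tg.create_task(client.sync_forever(timeout=180000, full_state=True))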
