Compare commits

...

2 Commits

1 changed file: matrix_pygmalion_bot/bot/ai/langchain.py (6 changed lines)

@@ -203,7 +203,7 @@ class AI(object):
             chat_human_name = "### Human"
         elif self.llm_chat_model.startswith('pygmalion'):
             prompt_chat = prompt_pygmalion
-            #chat_human_name = "You"
+            chat_human_name = "You"
         elif self.llm_chat_model.startswith('koboldai'):
             prompt_chat = prompt_koboldai
         else:
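
This hunk uncomments the "You" speaker label for pygmalion-style models. For context, here is a minimal sketch of that prefix-based dispatch; the template strings, the koboldai label, and the fallback branch are placeholders assumed for illustration, not values taken from the bot's code.

    # Hedged sketch of the model-prefix dispatch above. The template strings
    # are placeholders; only the "You" label for pygmalion models comes from
    # this commit.
    prompt_pygmalion = "{chat_history}\nYou: {input}"   # placeholder template
    prompt_koboldai = "{chat_history}\n{input}"         # placeholder template

    def select_chat_prompt(llm_chat_model: str):
        """Return (prompt template, human-name label) for a model-id prefix."""
        if llm_chat_model.startswith('pygmalion'):
            # The change in this hunk uncomments the "You" label.
            return prompt_pygmalion, "You"
        elif llm_chat_model.startswith('koboldai'):
            # The koboldai label is not shown in the diff; "### Human" is assumed here.
            return prompt_koboldai, "### Human"
        else:
            raise ValueError(f"No prompt configured for '{llm_chat_model}'")

    prompt_chat, chat_human_name = select_chat_prompt("pygmalion-6b")
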
@@ -243,8 +243,8 @@ class AI(object):
         tmp_prompt_text = prompt.format(chat_history=conversation_memory.buffer, input=message.content)
         prompt_len = self.llm_chat.get_num_tokens(tmp_prompt_text)
-        if prompt_len+256 > 2000:
-            logger.warning(f"Prompt too large. Estimated {prompt_len} tokens")
+        if prompt_len+200 > 2048:
+            logger.warning(f"Prompt too large. Estimated {prompt_len} tokens. Summarizing...")
             await reply_fn(f"<WARNING> Prompt too large. Estimated {prompt_len} tokens")
             if i == 0:
                 await conversation_memory.prune_memory(conversation_memory.min_len)
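
The second hunk tightens the token budget: instead of reserving 256 tokens against a 2000-token limit, it reserves 200 tokens against a 2048-token context window, and the log message now notes that the memory will be summarized. A hedged sketch of that guard is shown below, with count_tokens, prune_memory, and warn standing in for the real self.llm_chat.get_num_tokens, conversation_memory.prune_memory, and reply_fn.

    import logging

    logger = logging.getLogger(__name__)

    MAX_CONTEXT_TOKENS = 2048  # context window assumed by the updated check
    RESPONSE_RESERVE = 200     # tokens kept free for the model's reply

    async def ensure_prompt_fits(prompt_text, count_tokens, prune_memory, warn):
        """Warn and prune memory when the prompt leaves too little room for a reply."""
        prompt_len = count_tokens(prompt_text)
        if prompt_len + RESPONSE_RESERVE > MAX_CONTEXT_TOKENS:
            logger.warning(f"Prompt too large. Estimated {prompt_len} tokens. Summarizing...")
            await warn(f"<WARNING> Prompt too large. Estimated {prompt_len} tokens")
            await prune_memory()
            return False
        return True
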
