Browse Source

Change local IP address of the KoboldCpp API endpoint (172.16.85.10 → 172.16.33.10)

master
Hendrik Langer 1 year ago
parent
commit
7da91085af
  1. 4
      matrix_pygmalion_bot/bot/ai/langchain.py
  2. 2
      matrix_pygmalion_bot/bot/wrappers/langchain_koboldcpp.py

4
matrix_pygmalion_bot/bot/ai/langchain.py

@@ -93,8 +93,8 @@ class AI(object):
self.max_context = 2048
from ..wrappers.langchain_koboldcpp import KoboldCpp
self.llm_chat = KoboldCpp(temperature=self.bot.temperature, endpoint_url="http://172.16.85.10:5001/api/latest/generate", max_context=self.max_context, stop=['<|endoftext|>'], verbose=True)
self.llm_summary = KoboldCpp(temperature=0.7, repeat_penalty=1.15, top_k = 20, top_p= 0.9, endpoint_url="http://172.16.85.10:5001/api/latest/generate", max_context=self.max_context, stop=['<|endoftext|>'], max_tokens=512, verbose=True)
self.llm_chat = KoboldCpp(temperature=self.bot.temperature, endpoint_url="http://172.16.33.10:5001/api/latest/generate", max_context=self.max_context, stop=['<|endoftext|>'], verbose=True)
self.llm_summary = KoboldCpp(temperature=0.7, repeat_penalty=1.15, top_k = 20, top_p= 0.9, endpoint_url="http://172.16.33.10:5001/api/latest/generate", max_context=self.max_context, stop=['<|endoftext|>'], max_tokens=512, verbose=True)
self.llm_chat_model = "pygmalion-7b"
self.llm_summary_model = "vicuna-13b"
self.text_wrapper = text_wrapper

2
matrix_pygmalion_bot/bot/wrappers/langchain_koboldcpp.py

@@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
class KoboldCpp(LLM):
"""KoboldCpp LLM wrapper for testing purposes."""
endpoint_url: str = "http://172.16.85.10:5001/api/latest/generate"
endpoint_url: str = "http://172.16.33.10:5001/api/latest/generate"
temperature: Optional[float] = 0.7
"""The temperature to use for sampling."""

Loading…
Cancel
Save