From 1729e8b240475655c9df12cd0f90784eb043c21c Mon Sep 17 00:00:00 2001
From: Pierre-Olivier Mercier
Date: Fri, 7 Feb 2025 17:39:08 +0100
Subject: [PATCH 1/3] Add openai module

---
 Dockerfile        |  2 +-
 modules/openai.py | 54 ++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 55 insertions(+), 1 deletion(-)
 create mode 100644 modules/openai.py

diff --git a/Dockerfile b/Dockerfile
index 4fb3fc6..a027b9e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,7 +5,7 @@ WORKDIR /usr/src/app
 COPY requirements.txt /usr/src/app/
 
 RUN apk add --no-cache bash build-base capstone-dev mandoc-doc man-db w3m youtube-dl aspell aspell-fr && \
     pip install --no-cache-dir -r requirements.txt && \
-    pip install bs4 capstone dnspython && \
+    pip install bs4 capstone dnspython openai && \
     apk del build-base capstone-dev && \
     ln -s /var/lib/nemubot/home /home/nemubot
diff --git a/modules/openai.py b/modules/openai.py
new file mode 100644
index 0000000..2e0529b
--- /dev/null
+++ b/modules/openai.py
@@ -0,0 +1,54 @@
+"""Perform requests to openai"""
+
+# PYTHON STUFFS #######################################################
+
+from openai import OpenAI
+
+from nemubot import context
+from nemubot.hooks import hook
+
+from nemubot.module.more import Response
+
+
+# LOADING #############################################################
+
+CLIENT = None
+MODEL = "gpt-4"
+
+def load(context):
+    global CLIENT, MODEL
+    if not context.config or "apikey" not in context.config:
+        raise ImportError("You need an OpenAI API key in order to use "
+                          "this module. Add it to the module configuration: "
+                          "\n<module name=\"openai\" "
+                          "apikey=\"...\" />")
+    CLIENT = OpenAI(
+        base_url=context.config["endpoint"] if "endpoint" in context.config else None,
+        api_key=context.config["apikey"],
+    )
+
+    if "model" in context.config:
+        MODEL = context.config["model"]
+
+
+# MODULE INTERFACE ####################################################
+
+@hook.ask()
+def parseask(msg):
+    chat_completion = CLIENT.chat.completions.create(
+        messages=[
+            {
+                "role": "system",
+                "content": "You are a kind multilingual assistant. Respond to the user request in 255 characters maximum. Be concise, go directly to the point. Never add useless terms.",
+            },
+            {
+                "role": "user",
+                "content": msg.message,
+            }
+        ],
+        model=MODEL,
+    )
+
+    return Response(chat_completion.choices[0].message.content,
+                    msg.channel,
+                    msg.frm)

From 23f043673fdade2a753ebbc3484c35f8773c390b Mon Sep 17 00:00:00 2001
From: Pierre-Olivier Mercier
Date: Fri, 7 Feb 2025 17:39:08 +0100
Subject: [PATCH 2/3] Add openai module

---
 Dockerfile        |  2 +-
 modules/openai.py | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 61 insertions(+), 1 deletion(-)
 create mode 100644 modules/openai.py

diff --git a/Dockerfile b/Dockerfile
index 4fb3fc6..a027b9e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,7 +5,7 @@ WORKDIR /usr/src/app
 COPY requirements.txt /usr/src/app/
 
 RUN apk add --no-cache bash build-base capstone-dev mandoc-doc man-db w3m youtube-dl aspell aspell-fr && \
     pip install --no-cache-dir -r requirements.txt && \
-    pip install bs4 capstone dnspython && \
+    pip install bs4 capstone dnspython openai && \
     apk del build-base capstone-dev && \
     ln -s /var/lib/nemubot/home /home/nemubot
diff --git a/modules/openai.py b/modules/openai.py
new file mode 100644
index 0000000..1e3efaa
--- /dev/null
+++ b/modules/openai.py
@@ -0,0 +1,60 @@
+"""Perform requests to openai"""
+
+# PYTHON STUFFS #######################################################
+
+from openai import OpenAI
+
+from nemubot import context
+from nemubot.hooks import hook
+
+from nemubot.module.more import Response
+
+
+# LOADING #############################################################
+
+CLIENT = None
+MODEL = "gpt-4"
+ENDPOINT = None
+
+def load(context):
+    global CLIENT, ENDPOINT, MODEL
+    if not context.config or ("apikey" not in context.config and "endpoint" not in context.config):
+        raise ImportError("You need an OpenAI API key or endpoint in order to use "
+                          "this module. Add it to the module configuration: "
+                          "\n<module name=\"openai\" "
+                          "apikey=\"...\" endpoint=\"...\" />")
+    kwargs = {
+        "api_key": context.config["apikey"] if "apikey" in context.config else "",
+    }
+
+    if "endpoint" in context.config:
+        ENDPOINT = context.config["endpoint"]
+        kwargs["base_url"] = ENDPOINT
+
+    CLIENT = OpenAI(**kwargs)
+
+    if "model" in context.config:
+        MODEL = context.config["model"]
+
+
+# MODULE INTERFACE ####################################################
+
+@hook.ask()
+def parseask(msg):
+    chat_completion = CLIENT.chat.completions.create(
+        messages=[
+            {
+                "role": "system",
+                "content": "You are a kind multilingual assistant. Respond to the user request in 255 characters maximum. Be concise, go directly to the point. Never add useless terms.",
+            },
+            {
+                "role": "user",
+                "content": msg.message,
+            }
+        ],
+        model=MODEL,
+    )
+
+    return Response(chat_completion.choices[0].message.content,
+                    msg.channel,
+                    msg.frm)

From ea0ec42a4b152743b654d3ef3af61a7bd5077681 Mon Sep 17 00:00:00 2001
From: Pierre-Olivier Mercier
Date: Fri, 7 Feb 2025 21:38:11 +0100
Subject: [PATCH 3/3] openai: Add commands list_models and set_model

---
 modules/openai.py | 29 +++++++++++++++++++++++++++++
 1 file changed, 29 insertions(+)

diff --git a/modules/openai.py b/modules/openai.py
index 1e3efaa..b9b6e21 100644
--- a/modules/openai.py
+++ b/modules/openai.py
@@ -6,6 +6,8 @@ from openai import OpenAI
 
 from nemubot import context
+from nemubot.exception import IMException
 from nemubot.hooks import hook
+from nemubot.tools import web
 
 from nemubot.module.more import Response
 
@@ -39,6 +41,33 @@ def load(context):
 
 # MODULE INTERFACE ####################################################
 
+@hook.command("list_models",
+              help="List the models available at the configured endpoint")
+def cmd_listllm(msg):
+    llms = web.getJSON(ENDPOINT + "/models", timeout=6)
+    return Response(message=[m["id"] for m in llms["data"]], title="Here are the available models", channel=msg.channel)
+
+
+@hook.command("set_model",
+              help="Set the model to use when talking to nemubot")
+def cmd_setllm(msg):
+    global MODEL
+    if len(msg.args) != 1:
+        raise IMException("Indicate 1 model to use")
+
+    wanted_model = msg.args[0]
+
+    llms = web.getJSON(ENDPOINT + "/models", timeout=6)
+    for model in llms["data"]:
+        if wanted_model == model["id"]:
+            break
+    else:
+        raise IMException("Unable to set such model: unknown")
+
+    MODEL = wanted_model
+    return Response("New model in use: " + wanted_model, channel=msg.channel)
+
+
 @hook.ask()
 def parseask(msg):
     chat_completion = CLIENT.chat.completions.create(
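
Configuration note (illustrative, assuming nemubot's usual XML configuration
format): the module reads its settings from the "apikey", "endpoint" and
"model" attributes of its module entry, so a matching declaration could look
like:

    <module name="openai" apikey="sk-..." endpoint="http://localhost:8080/v1" model="gpt-4" />

With patch 2 applied, either "apikey" or "endpoint" is enough and "model" is
optional ("gpt-4" by default); the endpoint value above is only a placeholder
for an OpenAI-compatible API server.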