diff --git a/Dockerfile b/Dockerfile
index 4fb3fc6..a027b9e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,7 +5,7 @@ WORKDIR /usr/src/app
 COPY requirements.txt /usr/src/app/
 RUN apk add --no-cache bash build-base capstone-dev mandoc-doc man-db w3m youtube-dl aspell aspell-fr && \
     pip install --no-cache-dir -r requirements.txt && \
-    pip install bs4 capstone dnspython && \
+    pip install bs4 capstone dnspython openai && \
     apk del build-base capstone-dev && \
     ln -s /var/lib/nemubot/home /home/nemubot
diff --git a/modules/openai.py b/modules/openai.py
new file mode 100644
index 0000000..2e0529b
--- /dev/null
+++ b/modules/openai.py
@@ -0,0 +1,54 @@
+"""Perform requests to OpenAI"""
+
+# PYTHON STUFFS #######################################################
+
+from openai import OpenAI
+
+from nemubot import context
+from nemubot.hooks import hook
+
+from nemubot.module.more import Response
+
+
+# LOADING #############################################################
+
+CLIENT = None
+MODEL = "gpt-4"
+
+
+def load(context):
+    global CLIENT, MODEL
+    if not context.config or "apikey" not in context.config:
+        raise ImportError("You need an OpenAI API key in order to use "
+                          "this module. Add it to the module configuration: "
+                          "\n<module name=\"openai\" apikey=\"XXXXXX\" />")
+    CLIENT = OpenAI(
+        base_url=context.config["endpoint"] if "endpoint" in context.config else None,
+        api_key=context.config["apikey"],
+    )
+
+    if "model" in context.config:
+        MODEL = context.config["model"]
+
+
+# MODULE INTERFACE ####################################################
+
+@hook.ask()
+def parseask(msg):
+    chat_completion = CLIENT.chat.completions.create(
+        messages=[
+            {
+                "role": "system",
+                "content": "You are a kind multilingual assistant. Respond to the user request in 255 characters maximum. Be concise, go directly to the point. Never add useless terms.",
+            },
+            {
+                "role": "user",
+                "content": msg.message,
+            }
+        ],
+        model=MODEL,
+    )
+
+    return Response(chat_completion.choices[0].message.content,
+                    msg.channel,
+                    msg.frm)
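
Configuration note: the module reads its settings from the nemubot module configuration. "apikey" is required; "endpoint" and "model" are optional and fall back to the default OpenAI endpoint and to "gpt-4". A minimal sketch of the declaration, assuming nemubot's usual XML <module> element and placeholder values:

    <module name="openai"
            apikey="sk-REPLACE_ME"
            endpoint="https://api.openai.com/v1"
            model="gpt-4" />

The attribute names mirror what load() looks up in context.config; the endpoint and model values shown here are only illustrative, since load() works without them.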