Compare commits


No commits in common. "cea0ba39bcd7fd5d3e0cd5d62b74857c3ca1b08b" and "8244ee95c9bed60a5854cc456c88b8bd8a2eae12" have entirely different histories.

7 changed files with 23 additions and 28 deletions

Dockerfile

@@ -1,4 +1,4 @@
-FROM python:3.13-alpine
+FROM python:3-alpine
 # Set environment variables to prevent Python from writing .pyc files to disk and buffering stdout and stderr
 ENV PYTHONDONTWRITEBYTECODE=1

README.md

@@ -8,7 +8,7 @@ This Discord chatbot is built to follow the chat flow and have a conversation wi
 0. Have [Ollama](https://ollama.com) and [Docker](https://docker.com) installed and running
 1. Clone the repository
-2. Configure the `bot.env` file with at least the following variables (see [Configuration](#configuration) for more options):
+2. Configure the `.env` file with at least the following variables (see [Configuration](#configuration) for more options):
 - `DISCORD_TOKEN`: The token of the discord bot (token can be obtained from the [Discord Developer Portal](https://discord.com/developers/applications))
 - `ADMIN_ID`: The id of the admin user, which allows to RESET the chat
 3. Run `docker-compose up -d` to start the bot
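For step 2, a minimal `.env` might look like the sketch below; both values are placeholders, not real credentials:

```env
DISCORD_TOKEN=your-discord-bot-token
ADMIN_ID=123456789012345678
```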

bot.py

@@ -116,7 +116,7 @@ class Bot:
 # pull the model
 # await self.ollama.pull(self.model)
-activity = discord.Activity(name='Status', state=f'Hi, I\'m {self.bot_name.title()}. Don\'t talk to me.', type=discord.ActivityType.custom)
+activity = discord.Activity(name='Status', state=f'Hi, I\'m {self.bot_name.title()}! I only respond to mentions.', type=discord.ActivityType.custom)
 await self.discord.change_presence(activity=activity)
 logging.info(
@@ -168,19 +168,19 @@ class Bot:
 # Do not respond with llm in private messages
 if isinstance(message.channel, discord.DMChannel):
-response = DiscordResponse(message)
-if self.discord.user.mentioned_in(message):
-await response.write(message, 'I am sorry, I am unable to respond in private messages.')
-return
+response = DiscordResponse(message)
+if self.discord.user.mentioned_in(message):
+await response.write(message, 'I am sorry, I am unable to respond in private messages.')
+return
-# Do not respond to messages that don't mention us. 0.2 is 20 percent chance to answer anyway.
+# Do not respond to messages that don't mention us
 if not self.discord.user.mentioned_in(message) or message.author.bot or '@everyone' in message.content or '@here' in message.content:
 # don't respond to messages that don't mention us, but save it for context
 await self.save_message(str(message.channel.id), self.message(message, message.content), 'user')
 logging.info('Message saved for context in %s, but it was not for us', (message.channel.id))
 # However, if randomly it does accept the message, and respond. There is a 0.01% chance of it happening.
-if (random.random()* 1000) > 0.1:
+if (random.random() * 1000) > 0.1:
 return
 # Clean message
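On the 0.01% figure in the comment above: the early `return` fires whenever `random.random() * 1000 > 0.1`, so an unmentioned message slips through only when `random.random() <= 0.0001`. A quick standalone check of that rate (illustrative only, not part of the diff):

```python
import random

# Count how often the gate would let an unmentioned message through.
trials = 1_000_000
answered_anyway = sum(random.random() * 1000 <= 0.1 for _ in range(trials))

# Expected: roughly 100 out of 1,000,000, i.e. about 0.01%.
print(f"{answered_anyway} of {trials} ({answered_anyway / trials:.4%})")
```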
@@ -238,12 +238,8 @@ class Bot:
 response_message = ''
 data = await self.ollama.chat(model=self.model, keep_alive=-1, stream=False, messages=local_messages, options={'num_ctx': self.ctx})
-try:
-response_message = data['message']['content']
-await self.save_message(channel_id, response_message, 'assistant')
-except Exception as e:
-logging.error('Error saving response: %s', e)
-return 'I am sorry, I am unable to respond at the moment.'
+response_message = data['message']['content']
+await self.save_message(channel_id, response_message, 'assistant')
 return response_message
 except Exception as e:

docker-compose.yml

@@ -44,7 +44,7 @@ services:
 image: redis/redis-stack-server:latest
 restart: always
 volumes:
-- /portainer/Files/AppData/Config/discordluna/redis:/data
+- redis:/data
 networks:
 - redis
 expose:
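The volume change above swaps a host bind mount for a named volume called `redis`. For Compose to resolve that name, a top-level `volumes:` entry has to declare it; that entry is presumably elsewhere in the file and not shown in this hunk. A minimal sketch of the shape, with the service name assumed for illustration:

```yaml
# Assumed shape; the actual compose file may differ.
services:
  redis:
    image: redis/redis-stack-server:latest
    volumes:
      - redis:/data        # named volume managed by Docker

volumes:
  redis: {}                # top-level declaration the named volume requires
```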

package-lock.json (generated)

@@ -1,12 +1,12 @@
 {
 "name": "ollama-discord-bot",
-"version": "1.1.2",
+"version": "1.0.7",
 "lockfileVersion": 3,
 "requires": true,
 "packages": {
 "": {
 "name": "ollama-discord-bot",
-"version": "1.1.2",
+"version": "1.0.7",
 "license": "MIT",
 "devDependencies": {
 "@semantic-release/commit-analyzer": "^11.1.0",
@@ -828,12 +828,12 @@
 "dev": true
 },
 "node_modules/braces": {
-"version": "3.0.3",
-"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
-"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
+"version": "3.0.2",
+"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
 "dev": true,
 "dependencies": {
-"fill-range": "^7.1.1"
+"fill-range": "^7.0.1"
 },
 "engines": {
 "node": ">=8"
@@ -1605,9 +1605,9 @@
 }
 },
 "node_modules/fill-range": {
-"version": "7.1.1",
-"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
-"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
+"version": "7.0.1",
+"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
 "dev": true,
 "dependencies": {
 "to-regex-range": "^5.0.1"

package.json

@@ -1,6 +1,6 @@
 {
 "name": "ollama-discord-bot",
-"version": "1.1.2",
+"version": "1.0.7",
 "description": "This Discord chatbot is built to follow the chat flow and have a conversation with the user. The bot is built using the discord.py library, and the chat is stored in Redis. The number of messages to have in context is configurable.",
 "scripts": {
 "test": "echo \"Error: no test specified\" && exit 1"

requirements.txt

@@ -1,4 +1,3 @@
-discord-py==2.5.2
+discord-py==2.3.2
 redis==5.0.4
 ollama==0.2.0
-audioop-lts