Compare commits

...

11 Commits

Author SHA1 Message Date
cea0ba39bc initial commit of my fork of the code
Some checks failed: Release / Release (push), Release / Docker (push), Release / Update develop branch (push), and Release / Docker pre-release (push) have all been cancelled.
2025-05-10 00:19:54 -05:00
semantic-release-bot
ba6f93dc96 chore(release): 1.1.2 [skip ci]
## [1.1.2](https://github.com/The0mikkel/ollama-discord-bot/compare/v1.1.1...v1.1.2) (2025-03-13)

### Bug Fixes

* **Deps:** require audioop-lts to be installed, due to Discord.py requirement ([ae97fdc](ae97fdcce7)), closes [#10](https://github.com/The0mikkel/ollama-discord-bot/issues/10)
* **Docker:** lock Docker to use Python v3.13 ([c38e3ba](c38e3ba8c0)), closes [#10](https://github.com/The0mikkel/ollama-discord-bot/issues/10)
2025-03-13 22:15:31 +00:00
The0Mikkel
ae97fdcce7 fix(Deps): require audioop-lts to be installed, due to Discord.py requirement
This solves #10
2025-03-13 23:14:57 +01:00
The0Mikkel
c38e3ba8c0 fix(Docker): lock Docker to use Python v3.13
This solves #10
2025-03-13 23:14:24 +01:00
dependabot[bot]
622e46e22f
chore(deps-dev): bump braces from 3.0.2 to 3.0.3 (#8)
Bumps [braces](https://github.com/micromatch/braces) from 3.0.2 to 3.0.3.
- [Changelog](https://github.com/micromatch/braces/blob/master/CHANGELOG.md)
- [Commits](https://github.com/micromatch/braces/compare/3.0.2...3.0.3)

---
updated-dependencies:
- dependency-name: braces
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-27 15:31:35 +02:00
semantic-release-bot
ac930d08c4 chore(release): 1.1.1 [skip ci]
## [1.1.1](https://github.com/The0mikkel/ollama-discord-bot/compare/v1.1.0...v1.1.1) (2024-06-27)

### Bug Fixes

* **chat:** add error handling for failed message saving ([e32d5e4](e32d5e41e0))
2024-06-27 12:40:47 +00:00
The0Mikkel
12c38ec54f Merge branch 'main' of https://github.com/The0mikkel/ollama-discord-bot into main 2024-06-27 14:40:20 +02:00
The0Mikkel
e32d5e41e0 fix(chat): add error handling for failed message saving 2024-06-27 14:40:18 +02:00
semantic-release-bot
eecd0cd6a7 chore(release): 1.1.0 [skip ci]
## [1.1.0](https://github.com/The0mikkel/ollama-discord-bot/compare/v1.0.7...v1.1.0) (2024-06-27)

### Features

* **message:** add support for 2000+ character messages ([8244ee9](8244ee95c9))
2024-06-27 12:32:36 +00:00
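The 1.1.0 feature above works around Discord's hard limit of 2000 characters per message: replies longer than that have to be split and sent as several messages. Below is a minimal sketch of one way to do that split; it is illustrative only, and the helper name, newline-first chunking, and default limit are assumptions rather than the bot's actual implementation.

```python
# Hypothetical sketch: split a long reply into Discord-sized chunks.
# Discord rejects messages longer than 2000 characters, so longer text
# must be sent as several messages. Splitting on newlines first keeps
# paragraphs and code blocks intact where possible.
def split_for_discord(text: str, limit: int = 2000) -> list[str]:
    chunks: list[str] = []
    current = ''
    for line in text.splitlines(keepends=True):
        # Hard-slice any single line that is itself over the limit.
        while len(line) > limit:
            if current:
                chunks.append(current)
                current = ''
            chunks.append(line[:limit])
            line = line[limit:]
        if len(current) + len(line) > limit:
            chunks.append(current)
            current = ''
        current += line
    if current:
        chunks.append(current)
    return chunks
```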
The0Mikkel
bb8db4f3d7 Merge branch 'main' of https://github.com/The0mikkel/ollama-discord-bot into main 2024-06-27 14:32:09 +02:00
Ha1fdan
fd749872da
docs(readme): correct the name of the env file for the discord bot (#4) 2024-05-28 20:23:45 +02:00
7 changed files with 28 additions and 23 deletions


@@ -1,4 +1,4 @@
-FROM python:3-alpine
+FROM python:3.13-alpine
 # Set environment variables to prevent Python from writing .pyc files to disk and buffering stdout and stderr
 ENV PYTHONDONTWRITEBYTECODE=1


@@ -8,7 +8,7 @@ This Discord chatbot is built to follow the chat flow and have a conversation wi
 0. Have [Ollama](https://ollama.com) and [Docker](https://docker.com) installed and running
 1. Clone the repository
-2. Configure the `.env` file with at least the following variables (see [Configuration](#configuration) for more options):
+2. Configure the `bot.env` file with at least the following variables (see [Configuration](#configuration) for more options):
 - `DISCORD_TOKEN`: The token of the discord bot (token can be obtained from the [Discord Developer Portal](https://discord.com/developers/applications))
 - `ADMIN_ID`: The id of the admin user, which allows to RESET the chat
 3. Run `docker-compose up -d` to start the bot
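The two variables required in `bot.env` above end up as environment variables inside the container. A hedged sketch of how they might be read at startup follows; the `os.getenv` usage here is an assumption, not necessarily how bot.py actually loads its configuration.

```python
import os

# Hypothetical sketch of reading the two required bot.env variables;
# the real loading code in bot.py may differ.
DISCORD_TOKEN = os.getenv('DISCORD_TOKEN')  # bot token from the Discord Developer Portal
ADMIN_ID = os.getenv('ADMIN_ID')            # user id that is allowed to RESET the chat

if not DISCORD_TOKEN:
    raise RuntimeError('DISCORD_TOKEN is not set; check bot.env')
```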

bot.py (22 changed lines)

@@ -116,7 +116,7 @@ class Bot:
 # pull the model
 # await self.ollama.pull(self.model)
-activity = discord.Activity(name='Status', state=f'Hi, I\'m {self.bot_name.title()}! I only respond to mentions.', type=discord.ActivityType.custom)
+activity = discord.Activity(name='Status', state=f'Hi, I\'m {self.bot_name.title()}. Don\'t talk to me.', type=discord.ActivityType.custom)
 await self.discord.change_presence(activity=activity)
 logging.info(
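The hunk above only swaps the wording of the bot's custom status. For context, a minimal standalone sketch of setting a custom presence with discord.py the same way follows; the client setup and status text are illustrative, not taken from bot.py.

```python
import discord

# Minimal sketch, assuming a plain discord.py Client; mirrors the
# change_presence call shown in the diff above.
intents = discord.Intents.default()
client = discord.Client(intents=intents)

@client.event
async def on_ready():
    activity = discord.Activity(
        name='Status',
        state="Hi, I'm a bot. I only respond to mentions.",
        type=discord.ActivityType.custom,
    )
    await client.change_presence(activity=activity)
```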
@@ -168,19 +168,19 @@ class Bot:
 # Do not respond with llm in private messages
 if isinstance(message.channel, discord.DMChannel):
-response = DiscordResponse(message)
-if self.discord.user.mentioned_in(message):
-await response.write(message, 'I am sorry, I am unable to respond in private messages.')
-return
+response = DiscordResponse(message)
+if self.discord.user.mentioned_in(message):
+await response.write(message, 'I am sorry, I am unable to respond in private messages.')
+return
-# Do not respond to messages that don't mention us
+# Do not respond to messages that don't mention us. 0.2 is 20 percent chance to answer anyway.
 if not self.discord.user.mentioned_in(message) or message.author.bot or '@everyone' in message.content or '@here' in message.content:
 # don't respond to messages that don't mention us, but save it for context
 await self.save_message(str(message.channel.id), self.message(message, message.content), 'user')
 logging.info('Message saved for context in %s, but it was not for us', (message.channel.id))
 # However, if randomly it does accept the message, and respond. There is a 0.01% chance of it happening.
-if (random.random() * 1000) > 0.1:
+if (random.random()* 1000) > 0.1:
 return
 # Clean message
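Worth spelling out the arithmetic in the hunk above, because the new comment and the code disagree: `random.random()` is uniform on [0.0, 1.0), so `random.random() * 1000 > 0.1` almost always triggers the early return, and the bot answers an unmentioned message only about 0.01% of the time, not 20%. A small sketch (the function names here are hypothetical):

```python
import random

# The check used in the diff: the early return is skipped only when
# random.random() * 1000 <= 0.1, i.e. with probability 0.1 / 1000 = 0.0001,
# which is a 0.01% chance of answering anyway.
def should_answer_anyway() -> bool:
    return (random.random() * 1000) <= 0.1

# A genuine 20% chance, matching the wording of the new comment, would be:
def should_answer_anyway_20_percent() -> bool:
    return random.random() < 0.2
```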
@@ -238,8 +238,12 @@ class Bot:
 response_message = ''
 data = await self.ollama.chat(model=self.model, keep_alive=-1, stream=False, messages=local_messages, options={'num_ctx': self.ctx})
-response_message = data['message']['content']
-await self.save_message(channel_id, response_message, 'assistant')
+try:
+    response_message = data['message']['content']
+    await self.save_message(channel_id, response_message, 'assistant')
+except Exception as e:
+    logging.error('Error saving response: %s', e)
+    return 'I am sorry, I am unable to respond at the moment.'
 return response_message
 except Exception as e:
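This is the change the 1.1.1 release notes describe as "add error handling for failed message saving": if persisting the assistant's reply fails, the handler logs the error and returns a canned apology instead of raising. A standalone restatement of the pattern, with `save_message` as a stand-in for the bot's own coroutine:

```python
import logging

# Sketch of the pattern introduced above: a failed save (for example,
# Redis being unreachable) is logged and degraded to a fallback reply
# rather than escaping the message handler.
async def extract_and_save_reply(data: dict, save_message, channel_id: str) -> str:
    try:
        response_message = data['message']['content']
        await save_message(channel_id, response_message, 'assistant')
    except Exception as exc:
        logging.error('Error saving response: %s', exc)
        return 'I am sorry, I am unable to respond at the moment.'
    return response_message
```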


@@ -44,7 +44,7 @@ services:
 image: redis/redis-stack-server:latest
 restart: always
 volumes:
-- redis:/data
+- /portainer/Files/AppData/Config/discordluna/redis:/data
 networks:
 - redis
 expose:
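The hunk above only changes where the Redis container persists its data on the host (a bind mount under /portainer instead of a named volume); the chat history itself is still read and written by the bot over the compose network, as the project description notes ("the chat is stored in Redis"). A hedged sketch of what that storage could look like with the redis client pinned in requirements.txt; the hostname, key layout, and context trimming are assumptions, not the bot's actual schema:

```python
import json
import redis

# Hedged sketch: keep the most recent chat messages per channel in Redis.
# 'redis' as hostname assumes the compose service name; adjust as needed.
r = redis.Redis(host='redis', port=6379, decode_responses=True)

def save_message(channel_id: str, content: str, role: str, max_context: int = 20) -> None:
    key = f'chat:{channel_id}'
    r.rpush(key, json.dumps({'role': role, 'content': content}))
    r.ltrim(key, -max_context, -1)  # keep only the newest max_context entries

def load_messages(channel_id: str) -> list[dict]:
    key = f'chat:{channel_id}'
    return [json.loads(item) for item in r.lrange(key, 0, -1)]
```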

package-lock.json (generated, 18 changed lines)

@@ -1,12 +1,12 @@
 {
 "name": "ollama-discord-bot",
-"version": "1.0.7",
+"version": "1.1.2",
 "lockfileVersion": 3,
 "requires": true,
 "packages": {
 "": {
 "name": "ollama-discord-bot",
-"version": "1.0.7",
+"version": "1.1.2",
 "license": "MIT",
 "devDependencies": {
 "@semantic-release/commit-analyzer": "^11.1.0",
@@ -828,12 +828,12 @@
 "dev": true
 },
 "node_modules/braces": {
-"version": "3.0.2",
-"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
-"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+"version": "3.0.3",
+"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
 "dev": true,
 "dependencies": {
-"fill-range": "^7.0.1"
+"fill-range": "^7.1.1"
 },
 "engines": {
 "node": ">=8"
@@ -1605,9 +1605,9 @@
 }
 },
 "node_modules/fill-range": {
-"version": "7.0.1",
-"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
-"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+"version": "7.1.1",
+"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
 "dev": true,
 "dependencies": {
 "to-regex-range": "^5.0.1"


@@ -1,6 +1,6 @@
 {
 "name": "ollama-discord-bot",
-"version": "1.0.7",
+"version": "1.1.2",
 "description": "This Discord chatbot is built to follow the chat flow and have a conversation with the user. The bot is built using the discord.py library, and the chat is stored in Redis. The number of messages to have in context is configurable.",
 "scripts": {
 "test": "echo \"Error: no test specified\" && exit 1"


@@ -1,3 +1,4 @@
-discord-py==2.3.2
+discord-py==2.5.2
 redis==5.0.4
 ollama==0.2.0
+audioop-lts
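The added audioop-lts line ties back to the 1.1.2 fixes above: the stdlib audioop module was removed in Python 3.13, discord.py still imports it, so the Docker image is pinned to python:3.13 and audioop-lts supplies a drop-in audioop module. A quick check (illustrative script, not part of the repo):

```python
# Illustrative check that audioop is importable on Python 3.13+, where the
# stdlib module was removed and audioop-lts provides the replacement that
# discord.py imports for voice support.
import sys

try:
    import audioop  # noqa: F401  (provided by audioop-lts on Python >= 3.13)
    print(f'audioop available on Python {sys.version_info.major}.{sys.version_info.minor}')
except ImportError:
    print('audioop missing: install audioop-lts (see requirements.txt)')
```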