added model switching

commit 7e59f87493 (parent 7b901310bd)

README.md
@@ -14,10 +14,9 @@ You can install it with this command:

```
curl https://ollama.ai/install.sh | sh
```

Once it's all set up, you'll need to download the model. You can play with the available ones and see what works best for you, but for this bot, zephyr:7b-beta-q8_0 seems to work best of the ones I've tested. To install:

```
ollama pull zephyr:7b-beta-q8_0
```

Once it's all set up, you'll need to [download the models](https://ollama.ai/library) you want to use. Play with the available ones and see what works best for you, then add them to the `self.models` dictionary. If you want to use the ones I've included, just run the commands in models.md. You can also skip this step and let a model download automatically the first time it is switched to, but the response will be delayed until the download finishes.
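As a rough sketch of what that dictionary looks like (the full version appears in the Python hunks further down this diff), each entry maps a short name used with the `.model` command to a litellm model string; the `mixtral` line here is only a hypothetical user-added entry, not one of the included models:

```python
# sketch: short names -> litellm model strings ("ollama/" prefix + Ollama tag)
self.models = {
    'zephyr': 'ollama/zephyr:7b-beta-q8_0',
    'solar': 'ollama/solar',
    'mixtral': 'ollama/mixtral',  # hypothetical user-added entry
}
```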
You'll also need to install matrix-nio and litellm.
@@ -58,3 +57,20 @@ python3 launcher.py

**.help**
Show the built-in help menu

**.models**
Show the current model and the available models (admin only)

**.model _name_**
Set a model (admin only)

**.model reset**
Reset to the default model (admin only)

**.clear**
Reset all bot history and set the default model (admin only)

**.auth _user_**
Add a user to the admins (main admin only)

**.deauth _user_**
Remove a user from the admins (main admin only)
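For example, an admin might drive model switching from a room with a sequence like this (an illustrative set of commands only; the bot's replies are not shown):

```
.models
.model codellama
.ai write a function that reverses a string in python
.model reset
```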
@@ -14,8 +14,11 @@ channels = ["#channel1:SERVER.TLD",

personality = "a helpful and thorough AI assistant who provides accurate and detailed answers without being too verbose"

# users authorized to use special commands
admins = ['BOTOWNER',]

# create bot instance
bot = ollamarama(server, username, password, channels, personality)
bot = ollamarama(server, username, password, channels, personality, admins)

# run main function loop
asyncio.get_event_loop().run_until_complete(bot.main())
models.md (new file, 35 lines)

@@ -0,0 +1,35 @@
ollama pull zephyr:7b-beta-q8_0

ollama pull solar

ollama pull mistral

ollama pull llama2

ollama pull llama2-uncensored

ollama pull openchat

ollama pull codellama:13b-instruct-q4_0

ollama pull dolphin2.2-mistral:7b-q8_0

ollama pull deepseek-coder:6.7b

ollama pull orca2

ollama pull starling-lm

ollama pull vicuna:13b-q4_0

ollama pull phi

ollama pull orca-mini

ollama pull wizardcoder:python

ollama pull stablelm-zephyr

ollama pull neural-chat

ollama pull mistral-openorca
@@ -11,7 +11,7 @@ import datetime

from litellm import completion

class ollamarama:
    def __init__(self, server, username, password, channels, personality):
    def __init__(self, server, username, password, channels, personality, admins):
        self.server = server
        self.username = username
        self.password = password
@@ -29,8 +29,34 @@ class ollamarama:

        # prompt parts
        self.prompt = ("you are ", ". speak in the first person and never break character.")

        # set model, this one works best in my tests with the hardware I have, but you can try others
        self.model = "ollama/zephyr:7b-beta-q8_0"
        # put the models you want to use here, still testing various models
        self.models = {
            'zephyr': 'ollama/zephyr:7b-beta-q8_0',
            'solar': 'ollama/solar',
            'mistral': 'ollama/mistral',
            'llama2': 'ollama/llama2',
            'llama2-uncensored': 'ollama/llama2-uncensored',
            'openchat': 'ollama/openchat',
            'codellama': 'ollama/codellama:13b-instruct-q4_0',
            'dolphin-mistral': 'ollama/dolphin2.2-mistral:7b-q8_0',
            'deepseek-coder': 'ollama/deepseek-coder:6.7b',
            'orca2': 'ollama/orca2',
            'starling-lm': 'ollama/starling-lm',
            'vicuna': 'ollama/vicuna:13b-q4_0',
            'phi': 'ollama/phi',
            'orca-mini': 'ollama/orca-mini',
            'wizardcoder': 'ollama/wizardcoder:python',
            'stablelm-zephyr': 'ollama/stablelm-zephyr',
            'neural-chat': 'ollama/neural-chat',
            'mistral-openorca': 'ollama/mistral-openorca',
        }
        # set the default model
        self.default_model = self.models['solar']
        self.model = self.default_model

        # authorized users for changing models
        self.admins = admins

    # get the display name for a user
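For context on how these "ollama/" model strings are consumed: the module already imports `completion` from litellm (see the hunk above), and a call roughly like the following generates a reply from the currently selected model. This is a minimal sketch, not the bot's exact code; it assumes a local Ollama server is running with the model already pulled, and the message contents are made up:

```python
from litellm import completion

# self.model holds a litellm model string such as "ollama/solar";
# messages use the usual role/content chat format
response = completion(
    model="ollama/solar",
    messages=[
        {"role": "system", "content": "you are a helpful assistant."},
        {"role": "user", "content": "hello"},
    ],
)
print(response.choices[0].message.content)
```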
@@ -151,6 +177,47 @@ class ollamarama:

        # check if the message was sent after joining and not by the bot
        if message_time > self.join_time and sender != self.username:

            # admin commands
            if message == ".admins":
                await self.send_message(room_id, f"Bot admins: {', '.join(self.admins)}")
            if sender_display in self.admins:
                # model switching
                if message.startswith(".model"):
                    if message == ".models":
                        await self.send_message(room_id, f'''Current model: {self.model.removeprefix('ollama/')}
Available models: {', '.join(sorted(list(self.models)))}''')

                    if message.startswith(".model "):
                        m = message.split(" ", 1)[1]
                        if m is not None:
                            if m in self.models:
                                self.model = self.models[m]
                            elif m == 'reset':
                                self.model = self.default_model
                            await self.send_message(room_id, f"Model set to {self.model.removeprefix('ollama/')}")

                # reset history for all users
                if message == ".clear":
                    self.messages.clear()
                    self.model = self.default_model
                    await self.send_message(room_id, "Bot has been reset for everyone")

                if sender_display == self.admins[0]:
                    # add admins
                    if message.startswith(".auth "):
                        nick = message.split(" ", 1)[1].strip()
                        if nick is not None:
                            self.admins.append(nick)
                            await self.send_message(room_id, f"{nick} added to admins")

                    # remove admins
                    if message.startswith(".deauth "):
                        nick = message.split(" ", 1)[1].strip()
                        if nick is not None:
                            self.admins.remove(nick)
                            await self.send_message(room_id, f"{nick} removed from admins")

            # main AI response functionality
            if message.startswith(".ai ") or message.startswith(self.bot_id):
                m = message.split(" ", 1)
@@ -281,9 +348,12 @@ if __name__ == "__main__":

                "!ExAmPleOfApRivAtErOoM:SERVER.TLD", ]  # enter the channels you want it to join here

    personality = "a helpful and thorough AI assistant who provides accurate and detailed answers without being too verbose"

    # list of authorized users for admin commands
    admins = ['admin_nick1', 'admin_nick2',]

    # create bot instance
    bot = ollamarama(server, username, password, channels, personality)
    bot = ollamarama(server, username, password, channels, personality, admins)

    # run main function loop
    asyncio.get_event_loop().run_until_complete(bot.main())