improved config json
This commit is contained in:
parent 22430346d7
commit dcc41fef6b

config.json: 67 changed lines
config.json
@@ -1,5 +1,31 @@
-[
-    {
+{
+    "matrix":
+    {
+        "server": "https://matrix.org",
+        "username": "@USERNAME:SERVER.TLD",
+        "password": "PASSWORD",
+        "channels":
+        [
+            "#channel1:SERVER.TLD",
+            "#channel2:SERVER.TLD",
+            "#channel3:SERVER.TLD",
+            "!ExAmPleOfApRivAtErOoM:SERVER.TLD"
+        ],
+        "admins":
+        [
+            "admin_nick1",
+            "admin_nick2"
+        ]
+    },
+    "ollama":
+    {
+        "api_base": "http://localhost:11434",
+        "options":
+        {
+            "temperature": 0.8,
+            "top_p": 0.7,
+            "repeat_penalty": 1.2
+        },
         "models":
         {
             "llama3": "llama3:8b-instruct-q5_K_M",
@@ -11,40 +37,9 @@
             "dolphin-mistral": "dolphin-mistral:7b-v2.8-q5_K_M",
             "dolphin-llama3": "dolphin-llama3:8b-v2.9-q5_K_M",
             "llama3.1": "llama3.1:8b-instruct-q5_K_M"
         },
-        "default_model": "llama3.1"
-    },
-    {
-        "server": "https://matrix.org",
-        "username": "@USERNAME:SERVER.TLD",
-        "password": "PASSWORD",
-        "channels":
-        [
-            "#channel1:SERVER.TLD",
-            "#channel2:SERVER.TLD",
-            "#channel3:SERVER.TLD",
-            "!ExAmPleOfApRivAtErOoM:SERVER.TLD"
-        ],
-        "personality": "a minimalist AI assistant who provides longer responses when requested",
-        "admins":
-        [
-            "admin_nick1",
-            "admin_nick2"
-        ]
-    },
-    {
-        "api_base": "http://localhost:11434",
-        "options":
-        {
-            "temperature": 0.8,
-            "top_p": 0.7,
-            "repeat_penalty": 1.2
-        }
+        "default_model": "llama3.1",
+        "prompt": ["you are ", ". roleplay and speak in the first person and never break character. keep your responses brief and to the point."],
+        "personality": "a minimalist AI assistant who provides longer responses when requested"
     }
-]
+}
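For reference, a minimal sketch of loading the restructured config the way the updated __init__ does. It assumes the file is saved as config.json in the working directory and uses throwaway local names; the bot itself reads self.config_file and stores these values as attributes.

import json

# Load the new single-object config (the old layout was a positional list,
# accessed as config[0] / config[1] / config[2]).
with open("config.json", "r") as f:
    config = json.load(f)

# Matrix settings now live under a named "matrix" key. Unpacking .values()
# still relies on the keys appearing in this order in the file:
# server, username, password, channels, admins.
server, username, password, channels, admins = config["matrix"].values()

# Everything Ollama-related is grouped under "ollama".
api_url = config["ollama"]["api_base"] + "/api/chat"
models = config["ollama"]["models"]                        # nickname -> full model tag
default_model = models[config["ollama"]["default_model"]]  # e.g. "llama3.1:8b-instruct-q5_K_M"
temperature, top_p, repeat_penalty = config["ollama"]["options"].values()
prompt = config["ollama"]["prompt"]                        # two-element prompt template
personality = config["ollama"]["personality"]

print(f"{api_url=} {default_model=} {temperature=}")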
@@ -19,8 +19,10 @@ class ollamarama:
         config = json.load(f)
         f.close()

-        self.server, self.username, self.password, self.channels, self.default_personality, self.admins = config[1].values()
-        self.api_url = config[2]['api_base'] + "/api/chat"
+        self.server, self.username, self.password, self.channels, self.admins = config['matrix'].values()
+        self.api_url = config['ollama']['api_base'] + "/api/chat"
+        self.default_personality = config['ollama']['personality']
         self.personality = self.default_personality

         self.client = AsyncClient(self.server, self.username)
@@ -32,14 +34,14 @@ class ollamarama:
         self.messages = {}

         #prompt parts
-        self.prompt = ("you are ", ". roleplay and speak in the first person and never break character. keep your responses brief and to the point.")
+        self.prompt = config['ollama']['prompt']

-        self.models = config[0]['models']
+        self.models = config['ollama']['models']
         #set model
-        self.default_model = self.models[config[0]['default_model']]
+        self.default_model = self.models[config['ollama']['default_model']]
         self.model = self.default_model

-        self.temperature, self.top_p, self.repeat_penalty = config[2]['options'].values()
+        self.temperature, self.top_p, self.repeat_penalty = config['ollama']['options'].values()
         self.defaults = {
             "temperature": self.temperature,
             "top_p": self.top_p,
@@ -182,7 +184,7 @@ class ollamarama:
         with open(self.config_file, "r") as f:
             config = json.load(f)
             f.close()
-        self.models = config[0]['models']
+        self.models = config['ollama']['models']
         if message == ".models":
             current_model = f"Current model: {self.model}\nAvailable models: {', '.join(sorted(list(self.models)))}"
             await self.send_message(room_id, current_model)
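One caveat the new layout does not remove: the assignments that unpack config['matrix'].values() and config['ollama']['options'].values() still depend on the key order in config.json matching the order of the target variables. Purely as an illustration, a hypothetical order-independent lookup (not part of this commit):

import json

with open("config.json", "r") as f:
    config = json.load(f)

# Sketch only: fetch each setting by name so that reordering keys in
# config.json cannot silently shuffle values between variables.
matrix = config["matrix"]
server = matrix["server"]
username = matrix["username"]
password = matrix["password"]
channels = matrix["channels"]
admins = matrix["admins"]

# .get() with a fallback keeps things usable if an option is omitted;
# the fallback values mirror the defaults shipped in config.json.
options = config["ollama"]["options"]
temperature = options.get("temperature", 0.8)
top_p = options.get("top_p", 0.7)
repeat_penalty = options.get("repeat_penalty", 1.2)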