config/config.json (2 changes: 1 addition & 1 deletion)

@@ -7,7 +7,7 @@
   "timezone": "America/New_York",

   "serial_port": "",
-  "ai_provider": "lmstudio, openai, or ollama",
+  "ai_provider": "ollama",
   "system_prompt": "You are a helpful assistant responding to mesh network chats. Respond in as few words as possible while still answering fully.",

   "lmstudio_url": "http://localhost:1234/v1/chat/completions",
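The placeholder string is replaced with a single concrete provider name; main.py lowercases this value when selecting a backend (see the AI_PROVIDER hunk below). A minimal sketch of reading and sanity-checking the value at startup, assuming the layout above; the helper name and the set of accepted providers are illustrative, not part of this PR:

```python
import json

# Providers named elsewhere in this PR; the real project may accept others.
KNOWN_PROVIDERS = {"lmstudio", "openai", "ollama", "home_assistant"}

def load_ai_provider(path="config/config.json", default="lmstudio"):
    """Hypothetical helper: read ai_provider from config.json and normalize it."""
    with open(path, "r", encoding="utf-8") as f:
        cfg = json.load(f)
    provider = cfg.get("ai_provider", default).lower()
    if provider not in KNOWN_PROVIDERS:
        print(f"Warning: unknown ai_provider '{provider}', falling back to '{default}'")
        provider = default
    return provider
```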
main.py (12 changes: 9 additions & 3 deletions)
@@ -151,6 +151,8 @@ def save_config(cfg):
     print(f"Timezone set to: {timezone_str}")
     add_script_log(f"Timezone set to: {timezone_str}")
 commands_config = safe_load_json(COMMANDS_CONFIG_FILE, {"commands": []})
+cmd_list = [c.get("command", "") for c in commands_config.get("commands", [])]
+print(f"Loaded {len(cmd_list)} commands from config: {', '.join(cmd_list)}")

 def reload_config():
     global config
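The new startup lines summarize whatever safe_load_json returned. A rough sketch of the expected commands structure and the resulting log line, with field names inferred from the parse_incoming_text hunks below (command, ai_prompt, response); the example entries themselves are made up:

```python
# Hypothetical commands config, shaped after the fields parse_incoming_text reads.
commands_config = {
    "commands": [
        {"command": "weather", "ai_prompt": "Give a short weather report for {user_input}"},
        {"command": "ping", "response": "pong"},
    ]
}

cmd_list = [c.get("command", "") for c in commands_config.get("commands", [])]
print(f"Loaded {len(cmd_list)} commands from config: {', '.join(cmd_list)}")
# Loaded 2 commands from config: weather, ping
```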
@@ -194,6 +196,7 @@ def info_print(*args, **kwargs):
 # AI Provider & Other Config Vars
 # -----------------------------
 AI_PROVIDER = config.get("ai_provider", "lmstudio").lower()
+print(f"AI Provider configured: {AI_PROVIDER}")
 SYSTEM_PROMPT = config.get("system_prompt", "You are a helpful assistant responding to mesh network chats.")
 LMSTUDIO_URL = config.get("lmstudio_url", "http://localhost:1234/v1/chat/completions")
 LMSTUDIO_TIMEOUT = config.get("lmstudio_timeout", 60)
@@ -757,14 +760,13 @@ def parse_incoming_text(text, sender_id, is_direct, channel_idx):
     parts = text_lower.split()
     first_word = parts[0] if parts else ""

-    if first_word == "ai" and len(parts) == 2:
+    if first_word == "ai" and len(parts) == 2 and parts[1] in ("on", "off"):
         if parts[1] == "on":
             active_ai_channels[channel_idx] = now
             return "🤖 AI enabled for this channel."
         if parts[1] == "off":
             active_ai_channels.pop(channel_idx, None)
             return "🤖 AI disabled for this channel."
-        return "Usage: ai on | ai off"

     # ----------------------------
     # 2. Command matching (word-based)
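With the extra parts[1] in ("on", "off") check, a two-word message such as "ai weather" no longer short-circuits with a usage string; it falls through to command matching and the rest of the parser. A toy reproduction of just this guard, with active_ai_channels and now standing in for the real module state:

```python
# Standalone sketch of the tightened "ai on/off" guard; everything below the
# toggle (command matching, AI handling) is omitted and represented by None.
active_ai_channels = {}
now = 0
channel_idx = 1

def handle_ai_toggle(text):
    parts = text.lower().split()
    first_word = parts[0] if parts else ""
    if first_word == "ai" and len(parts) == 2 and parts[1] in ("on", "off"):
        if parts[1] == "on":
            active_ai_channels[channel_idx] = now
            return "🤖 AI enabled for this channel."
        active_ai_channels.pop(channel_idx, None)
        return "🤖 AI disabled for this channel."
    return None  # fall through to the rest of parse_incoming_text

print(handle_ai_toggle("ai on"))       # 🤖 AI enabled for this channel.
print(handle_ai_toggle("ai weather"))  # None (previously: "Usage: ai on | ai off")
```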
@@ -782,8 +784,10 @@ def parse_incoming_text(text, sender_id, is_direct, channel_idx):
         cmd_text = c.get("command", "").lower()

         if cmd_text and cmd_text == first_word:
+            dprint(f"Matched config command: '{cmd_text}'")
             if "ai_prompt" in c:
                 prompt = c["ai_prompt"].replace("{user_input}", user_input)
+                dprint(f"AI prompt template: {c['ai_prompt']}, final prompt: '{prompt}'")

                 if AI_PROVIDER == "home_assistant" and HOME_ASSISTANT_ENABLE_PIN:
                     if not pin_is_valid(user_input):
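The second dprint shows both the raw template and the substituted prompt. A small illustration of that substitution with a made-up command entry; {user_input} is the literal placeholder the config uses:

```python
# Made-up command entry and user input; only the substitution step mirrors the PR.
c = {"command": "weather", "ai_prompt": "Give a short weather report for {user_input}"}
user_input = "boston tomorrow"

prompt = c["ai_prompt"].replace("{user_input}", user_input)
print(f"AI prompt template: {c['ai_prompt']}, final prompt: '{prompt}'")
# AI prompt template: Give a short weather report for {user_input},
# final prompt: 'Give a short weather report for boston tomorrow'
```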
@@ -794,7 +798,9 @@ def parse_incoming_text(text, sender_id, is_direct, channel_idx):
                 if not is_direct:
                     active_ai_channels[channel_idx] = now

-                return get_ai_response(prompt) or "🤖 [No AI response]"
+                ai_response = get_ai_response(prompt)
+                dprint(f"AI response: {ai_response}")
+                return ai_response or "🤖 [No AI response]"

             if "response" in c:
                 return c["response"].replace("{user_input}", user_input)
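Splitting the old one-line return lets the raw provider reply be logged before the fallback string is applied. A toy harness showing the fallback behavior; the wrapper function and the lambda providers are stand-ins for get_ai_response:

```python
def respond(prompt, get_ai_response, dprint=print):
    # Mirrors the new structure: capture, log, then fall back if empty or None.
    ai_response = get_ai_response(prompt)
    dprint(f"AI response: {ai_response}")
    return ai_response or "🤖 [No AI response]"

print(respond("hello", lambda p: "hi there"))  # hi there
print(respond("hello", lambda p: ""))          # 🤖 [No AI response]
```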