CONTRIBUTING.md (2 changes: 1 addition & 1 deletion)
@@ -24,7 +24,7 @@ You'll always get credit for your work.

```bash
$ cd mycli
-$ uv sync --extra dev --extra ssh
+$ uv sync --extra dev
```
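
The `dev` extra already lists `paramiko` and `sshtunnel`, and with this change it pulls in the LLM dependencies as well (see the pyproject.toml diff below), so the separate `--extra ssh` is no longer needed.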

We've just created a virtual environment and installed all the dependencies
README.md (2 changes: 1 addition & 1 deletion)
@@ -20,7 +20,7 @@ If you already know how to install Python packages, then you can install it via
You might need sudo on Linux.

```bash
-pip install -U mycli
+pip install -U 'mycli[all]'
```

or
changelog.md (5 changes: 5 additions & 0 deletions)
@@ -1,6 +1,11 @@
Upcoming (TBD)
==============

+Features
+--------
+* Make LLM dependencies an optional extra.
+
+
Internal
--------
* Add mypy to Pull Request template.
doc/llm.md (23 changes: 21 additions & 2 deletions)
@@ -8,13 +8,22 @@ Alias: `\ai` works the same as `\llm`.

## Quick Start

-1) Configure your API key (only needed for remote providers like OpenAI):
+1) Make sure mycli is installed with the `[llm]` extra:
+```bash
+pip install 'mycli[llm]'
+```
+or that the `llm` dependency is installed separately:
+```bash
+pip install llm
+```
+
+2) From the mycli prompt, configure your API key (only needed for remote providers like OpenAI):

```text
\llm keys set openai
```

-2) Ask a question. The response’s SQL (inside a ```sql fenced block) is extracted and pre-filled at the prompt:
+3) Ask a question. The response’s SQL (inside a ```sql fenced block) is extracted and pre-filled at the prompt:

```text
World> \llm "Capital of India?"
@@ -165,6 +174,16 @@ World> \llm templates show mycli-llm-template
- Data sent: Contextual questions send schema (table/column names and types) and a single sample row per table. Review your data sensitivity policies before using remote models; prefer local models (such as ollama) if needed.
- Help: Running `\llm` with no arguments shows a short usage message.

+## Turning Off LLM Support
+
+To turn off LLM support even when the `llm` dependency is installed, set the `MYCLI_LLM_OFF` environment variable:
+```bash
+export MYCLI_LLM_OFF=1
+```
+
+This may be desirable for faster startup times.
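
Internally, mycli guards the optional import behind this flag. A condensed sketch of the pattern this change adds in `mycli/packages/special/llm.py`:

```python
import os

try:
    if not os.environ.get('MYCLI_LLM_OFF'):
        import llm  # optional dependency from the [llm] extra

        LLM_IMPORTED = True
    else:
        LLM_IMPORTED = False
except ImportError:
    # llm is not installed: LLM features stay disabled
    LLM_IMPORTED = False
```

When the variable is set (or the package is missing), `LLM_IMPORTED` stays `False` and the `\llm` command is never registered, so the rest of mycli behaves exactly as before.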


---

## Learn More
mycli/packages/special/llm.py (56 changes: 49 additions & 7 deletions)
@@ -1,4 +1,5 @@
import contextlib
+import functools
import io
import logging
import os
@@ -10,15 +11,30 @@
from typing import Optional, Tuple

import click
-import llm
-from llm.cli import cli
+
+# The llm package is an optional extra: honor MYCLI_LLM_OFF and degrade
+# gracefully when the dependency is not installed.
+try:
+    if not os.environ.get('MYCLI_LLM_OFF'):
+        import llm
+
+        LLM_IMPORTED = True
+    else:
+        LLM_IMPORTED = False
+except ImportError:
+    LLM_IMPORTED = False
+try:
+    if not os.environ.get('MYCLI_LLM_OFF'):
+        from llm.cli import cli
+
+        LLM_CLI_IMPORTED = True
+    else:
+        LLM_CLI_IMPORTED = False
+except ImportError:
+    LLM_CLI_IMPORTED = False

from mycli.packages.special.main import Verbosity, parse_special_command

log = logging.getLogger(__name__)

-LLM_CLI_COMMANDS = list(cli.commands.keys())
-MODELS = {x.model_id: None for x in llm.get_models()}
LLM_TEMPLATE_NAME = "mycli-llm-template"


@@ -67,7 +83,7 @@ def build_command_tree(cmd):
    if isinstance(cmd, click.Group):
        for name, subcmd in cmd.commands.items():
            if cmd.name == "models" and name == "default":
-                tree[name] = MODELS
+                tree[name] = {x.model_id: None for x in llm.get_models()}
            else:
                tree[name] = build_command_tree(subcmd)
    else:
@@ -76,7 +92,7 @@


# Generate the command tree for autocompletion
-COMMAND_TREE = build_command_tree(cli) if cli else {}
+COMMAND_TREE = build_command_tree(cli) if LLM_CLI_IMPORTED is True else {}


def get_completions(tokens, tree=COMMAND_TREE):
@@ -120,7 +136,25 @@ def __init__(self, results=None):
# Plugins directory
# https://llm.datasette.io/en/stable/plugins/directory.html
"""

+NEED_DEPENDENCIES = """
+To enable LLM features you need to install mycli with LLM support:
+
+pip install 'mycli[llm]'
+
+or
+
+pip install 'mycli[all]'
+
+or install LLM libraries separately:
+
+pip install llm
+
+This is required to use the \\llm command.
+"""

_SQL_CODE_FENCE = r"```sql\n(.*?)\n```"
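# Example (with a hypothetical `reply` string): capturing multi-line SQL needs
# re.DOTALL, since '.' does not cross newlines by default:
#     re.search(_SQL_CODE_FENCE, reply, re.DOTALL).group(1)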

PROMPT = """
You are a helpful assistant who is a MySQL expert. You are embedded in a mysql
cli tool called mycli.
@@ -159,8 +193,16 @@ def ensure_mycli_template(replace=False):
    return


+@functools.cache
+def cli_commands() -> list[str]:
+    # Resolved lazily and memoized, so importing this module does not require llm.
+    return list(cli.commands.keys())


def handle_llm(text, cur) -> Tuple[str, Optional[str], float]:
    _, verbosity, arg = parse_special_command(text)
+    if not LLM_IMPORTED:
+        output = [(None, None, None, NEED_DEPENDENCIES)]
+        raise FinishIteration(output)
    if not arg.strip():
        output = [(None, None, None, USAGE)]
        raise FinishIteration(output)
@@ -176,7 +218,7 @@ def handle_llm(text, cur) -> Tuple[str, Optional[str], float]:
        capture_output = False
        use_context = False
        restart = True
-    elif parts and parts[0] in LLM_CLI_COMMANDS:
+    elif parts and parts[0] in cli_commands():
        capture_output = False
        use_context = False
    elif parts and parts[0] == "--help":
mycli/packages/special/main.py (17 changes: 17 additions & 0 deletions)
@@ -1,8 +1,18 @@
from collections import namedtuple
from enum import Enum
import logging
+import os
from typing import Callable

+# Optional llm dependency, guarded the same way as in packages/special/llm.py.
+try:
+    if not os.environ.get('MYCLI_LLM_OFF'):
+        import llm  # noqa: F401
+
+        LLM_IMPORTED = True
+    else:
+        LLM_IMPORTED = False
+except ImportError:
+    LLM_IMPORTED = False
from pymysql.cursors import Cursor

logger = logging.getLogger(__name__)
@@ -179,3 +189,10 @@ def quit_(*_args):
@special_command("\\G", "\\G", "Display current query results vertically.", arg_type=ArgType.NO_QUERY, case_sensitive=True)
def stub():
    raise NotImplementedError


+# Register \llm only when the optional dependency is importable, so the command
+# stays out of the help output otherwise.
+if LLM_IMPORTED:
+
+    @special_command("\\llm", "\\ai", "Interrogate LLM.", arg_type=ArgType.RAW_QUERY, case_sensitive=True)
+    def llm_stub():
+        raise NotImplementedError
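
Because `@special_command` executes at import time, gating it behind `if LLM_IMPORTED:` means `\llm` is simply never registered when the optional dependency is absent; that is also why the help-output test further below skips the LLM row. A toy sketch of the pattern (the registry and names here are hypothetical, not mycli's actual API):

```python
# Hypothetical mini-registry showing conditional decorator registration.
REGISTRY: dict[str, object] = {}

def special_command(name: str):
    def decorator(fn):
        REGISTRY[name] = fn  # registration happens at import time
        return fn
    return decorator

LLM_IMPORTED = False  # pretend the optional dependency is missing

if LLM_IMPORTED:
    @special_command("\\llm")
    def llm_stub():
        raise NotImplementedError

print("\\llm" in REGISTRY)  # False: the command was never registered
```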
pyproject.toml (15 changes: 12 additions & 3 deletions)
@@ -21,9 +21,6 @@ dependencies = [
"pyperclip >= 1.8.1",
"pycryptodomex",
"pyfzf >= 0.3.1",
"llm>=0.19.0",
"setuptools", # Required by llm commands to install models
"pip",
]

[build-system]
@@ -35,6 +32,15 @@ build-backend = "setuptools.build_meta"

[project.optional-dependencies]
ssh = ["paramiko", "sshtunnel"]
+llm = [
+    "llm>=0.19.0",
+    "setuptools", # Required by llm commands to install models
+    "pip",
+]
+all = [
+    "mycli[ssh]",
+    "mycli[llm]",
+]
dev = [
"behave>=1.2.6",
"coverage>=7.2.7",
@@ -46,6 +52,9 @@ dev = [
"pdbpp>=0.10.3",
"paramiko",
"sshtunnel",
"llm>=0.19.0",
"setuptools", # Required by llm commands to install models
"pip",
]

[project.scripts]
test/features/fixture_data/help_commands.txt (1 change: 1 addition & 0 deletions)
@@ -9,6 +9,7 @@
| \fd | \fd [name] | Delete a favorite query. |
| \fs | \fs name query | Save a favorite query. |
| \l | \l | List databases. |
+| \llm | \ai | Interrogate LLM. |
| \once | \o [-o] filename | Append next result to an output file (overwrite using -o). |
| \pipe_once | \| command | Send next result to a subprocess. |
| \timing | \t | Toggle timing of commands. |
test/features/steps/crud_database.py (3 changes: 3 additions & 0 deletions)
@@ -75,6 +75,9 @@ def step_see_prompt(context):
@then("we see help output")
def step_see_help(context):
    for expected_line in context.fixture_data["help_commands.txt"]:
+        # Skip the \llm row in case the tests run without the llm extra installed.
+        if 'LLM' in expected_line:
+            continue
        wrappers.expect_exact(context, expected_line, timeout=1)

