diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6659fd27..200f24bf 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -24,7 +24,7 @@ You'll always get credit for your work.
 
 ```bash
 $ cd mycli
-$ uv sync --extra dev --extra ssh
+$ uv sync --extra dev
 ```
 
 We've just created a virtual environment and installed all the dependencies
diff --git a/README.md b/README.md
index 3b823ac7..bcbbabae 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ If you already know how to install Python packages, then you can install it via
 You might need sudo on Linux.
 
 ```bash
-pip install -U mycli
+pip install -U 'mycli[all]'
 ```
 
 or
diff --git a/changelog.md b/changelog.md
index 61172cd8..3f208e87 100644
--- a/changelog.md
+++ b/changelog.md
@@ -1,6 +1,11 @@
 Upcoming (TBD)
 ==============
 
+Features
+--------
+* Make LLM dependencies an optional extra.
+
+
 Internal
 --------
 * Add mypy to Pull Request template.
diff --git a/doc/llm.md b/doc/llm.md
index 4c9b8268..3b76a102 100644
--- a/doc/llm.md
+++ b/doc/llm.md
@@ -8,13 +8,22 @@ Alias: `\ai` works the same as `\llm`.
 
 ## Quick Start
 
-1) Configure your API key (only needed for remote providers like OpenAI):
+1) Make sure mycli is installed with the `[llm]` extra:
+```bash
+pip install 'mycli[llm]'
+```
+or that the `llm` dependency is installed separately:
+```bash
+pip install llm
+```
+
+2) From the mycli prompt, configure your API key (only needed for remote providers like OpenAI):
 
 ```text
 \llm keys set openai
 ```
 
-2) Ask a question. The response’s SQL (inside a ```sql fenced block) is extracted and pre-filled at the prompt:
+3) Ask a question. The response’s SQL (inside a ```sql fenced block) is extracted and pre-filled at the prompt:
 
 ```text
 World> \llm "Capital of India?"
@@ -165,6 +174,16 @@ World> \llm templates show mycli-llm-template
 - Data sent: Contextual questions send schema (table/column names and types) and a single sample row per table. Review your data sensitivity policies before using remote models; prefer local models (such as ollama) if needed.
 - Help: Running `\llm` with no arguments shows a short usage message.
 
+## Turning Off LLM Support
+
+To turn off LLM support even when the `llm` dependency is installed, set the `MYCLI_LLM_OFF` environment variable:
+```bash
+export MYCLI_LLM_OFF=1
+```
+
+This may be desirable for faster startup times.
+
+
 ---
 
 ## Learn More
diff --git a/mycli/packages/special/llm.py b/mycli/packages/special/llm.py
index 4bce0980..fd8ff180 100644
--- a/mycli/packages/special/llm.py
+++ b/mycli/packages/special/llm.py
@@ -1,4 +1,5 @@
 import contextlib
+import functools
 import io
 import logging
 import os
@@ -10,15 +11,30 @@ from typing import Optional, Tuple
 
 import click
-import llm
-from llm.cli import cli
+
+try:
+    if not os.environ.get('MYCLI_LLM_OFF'):
+        import llm
+
+        LLM_IMPORTED = True
+    else:
+        LLM_IMPORTED = False
+except ImportError:
+    LLM_IMPORTED = False
+try:
+    if not os.environ.get('MYCLI_LLM_OFF'):
+        from llm.cli import cli
+
+        LLM_CLI_IMPORTED = True
+    else:
+        LLM_CLI_IMPORTED = False
+except ImportError:
+    LLM_CLI_IMPORTED = False
 
 from mycli.packages.special.main import Verbosity, parse_special_command
 
 log = logging.getLogger(__name__)
 
-LLM_CLI_COMMANDS = list(cli.commands.keys())
-MODELS = {x.model_id: None for x in llm.get_models()}
 LLM_TEMPLATE_NAME = "mycli-llm-template"
@@ -67,7 +83,7 @@ def build_command_tree(cmd):
     if isinstance(cmd, click.Group):
         for name, subcmd in cmd.commands.items():
             if cmd.name == "models" and name == "default":
-                tree[name] = MODELS
+                tree[name] = {x.model_id: None for x in llm.get_models()}
             else:
                 tree[name] = build_command_tree(subcmd)
     else:
@@ -76,7 +92,7 @@
 
 
 # Generate the command tree for autocompletion
-COMMAND_TREE = build_command_tree(cli) if cli else {}
+COMMAND_TREE = build_command_tree(cli) if LLM_CLI_IMPORTED else {}
 
 
 def get_completions(tokens, tree=COMMAND_TREE):
@@ -120,7 +136,25 @@ def __init__(self, results=None):
 # Plugins directory
 # https://llm.datasette.io/en/stable/plugins/directory.html
 """
+
+NEED_DEPENDENCIES = """
+To enable LLM features, install mycli with LLM support:
+
+    pip install 'mycli[llm]'
+
+or
+
+    pip install 'mycli[all]'
+
+or install the llm library separately:
+
+    pip install llm
+
+This is required to use the \\llm command.
+"""
+
 _SQL_CODE_FENCE = r"```sql\n(.*?)\n```"
+
 PROMPT = """
 You are a helpful assistant who is a MySQL expert. You are embedded in a mysql cli tool called mycli.
@@ -159,8 +193,16 @@ def ensure_mycli_template(replace=False):
         return
 
 
+@functools.cache
+def cli_commands() -> list[str]:
+    return list(cli.commands.keys())
+
+
 def handle_llm(text, cur) -> Tuple[str, Optional[str], float]:
     _, verbosity, arg = parse_special_command(text)
+    if not LLM_IMPORTED:
+        output = [(None, None, None, NEED_DEPENDENCIES)]
+        raise FinishIteration(output)
     if not arg.strip():
         output = [(None, None, None, USAGE)]
         raise FinishIteration(output)
@@ -176,7 +218,7 @@ def handle_llm(text, cur) -> Tuple[str, Optional[str], float]:
         capture_output = False
         use_context = False
         restart = True
-    elif parts and parts[0] in LLM_CLI_COMMANDS:
+    elif parts and parts[0] in cli_commands():
         capture_output = False
         use_context = False
     elif parts and parts[0] == "--help":
diff --git a/mycli/packages/special/main.py b/mycli/packages/special/main.py
index 1600a03b..0c184565 100644
--- a/mycli/packages/special/main.py
+++ b/mycli/packages/special/main.py
@@ -1,8 +1,18 @@
 from collections import namedtuple
 from enum import Enum
 import logging
+import os
 from typing import Callable
 
+try:
+    if not os.environ.get('MYCLI_LLM_OFF'):
+        import llm  # noqa: F401
+
+        LLM_IMPORTED = True
+    else:
+        LLM_IMPORTED = False
+except ImportError:
+    LLM_IMPORTED = False
 from pymysql.cursors import Cursor
 
 logger = logging.getLogger(__name__)
@@ -179,3 +189,10 @@ def quit_(*_args):
 @special_command("\\G", "\\G", "Display current query results vertically.", arg_type=ArgType.NO_QUERY, case_sensitive=True)
 def stub():
     raise NotImplementedError
+
+
+if LLM_IMPORTED:
+
+    @special_command("\\llm", "\\ai", "Interrogate LLM.", arg_type=ArgType.RAW_QUERY, case_sensitive=True)
+    def llm_stub():
+        raise NotImplementedError
diff --git a/pyproject.toml b/pyproject.toml
index 25117db8..288c0170 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,9 +21,6 @@ dependencies = [
     "pyperclip >= 1.8.1",
     "pycryptodomex",
     "pyfzf >= 0.3.1",
-    "llm>=0.19.0",
-    "setuptools",  # Required by llm commands to install models
-    "pip",
 ]
 
 [build-system]
@@ -35,6 +32,15 @@ build-backend = "setuptools.build_meta"
 
 [project.optional-dependencies]
 ssh = ["paramiko", "sshtunnel"]
+llm = [
+    "llm>=0.19.0",
+    "setuptools",  # Required by llm commands to install models
+    "pip",
+]
+all = [
+    "mycli[ssh]",
+    "mycli[llm]",
+]
 dev = [
     "behave>=1.2.6",
     "coverage>=7.2.7",
@@ -46,6 +52,9 @@ dev = [
     "pdbpp>=0.10.3",
     "paramiko",
     "sshtunnel",
+    "llm>=0.19.0",
+    "setuptools",  # Required by llm commands to install models
+    "pip",
 ]
 
 [project.scripts]
diff --git a/test/features/fixture_data/help_commands.txt b/test/features/fixture_data/help_commands.txt
index 86fccbe6..9cb21324 100644
--- a/test/features/fixture_data/help_commands.txt
+++ b/test/features/fixture_data/help_commands.txt
@@ -9,6 +9,7 @@
 | \fd        | \fd [name]       | Delete a favorite query.                                   |
 | \fs        | \fs name query   | Save a favorite query.                                     |
 | \l         | \l               | List databases.                                            |
+| \llm       | \ai              | Interrogate LLM.                                           |
 | \once      | \o [-o] filename | Append next result to an output file (overwrite using -o). |
 | \pipe_once | \| command       | Send next result to a subprocess.                          |
 | \timing    | \t               | Toggle timing of commands.                                 |
diff --git a/test/features/steps/crud_database.py b/test/features/steps/crud_database.py
index 6cefb123..0e1726f5 100644
--- a/test/features/steps/crud_database.py
+++ b/test/features/steps/crud_database.py
@@ -75,6 +75,9 @@ def step_see_prompt(context):
 
 
 @then("we see help output")
 def step_see_help(context):
     for expected_line in context.fixture_data["help_commands.txt"]:
+        # Skip the \llm row in case the tests are run without the llm extra.
+        if 'LLM' in expected_line:
+            continue
         wrappers.expect_exact(context, expected_line, timeout=1)
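
The same guard pattern appears twice above (in `llm.py` and `main.py`): check the opt-out env var first, tolerate `ImportError` second, and gate feature registration on the resulting flag. For reviewers, here is a minimal standalone sketch of that idea under stated assumptions — the names `OPT_OUT_ENV` and `load_optional` are illustrative only and do not exist in mycli, which inlines this logic per module:

```python
import importlib
import os
from types import ModuleType
from typing import Optional, Tuple

# Hypothetical constant for illustration; mycli hardcodes "MYCLI_LLM_OFF".
OPT_OUT_ENV = "MYCLI_LLM_OFF"


def load_optional(module_name: str) -> Tuple[Optional[ModuleType], bool]:
    """Import an optional dependency, honoring the opt-out env var.

    Returns (module, imported_flag) and never raises if the module is absent.
    """
    if os.environ.get(OPT_OUT_ENV):
        return None, False
    try:
        return importlib.import_module(module_name), True
    except ImportError:
        return None, False


llm, LLM_IMPORTED = load_optional("llm")

if LLM_IMPORTED:
    # Register the feature only when the dependency resolved, mirroring the
    # conditional `\llm` special-command registration in main.py above.
    print("\\llm command available")
else:
    print("LLM support disabled; enable with: pip install 'mycli[llm]'")
```

Skipping the import when `MYCLI_LLM_OFF` is set (rather than importing and ignoring the module) is what buys the faster startup the docs mention, since the flag short-circuits before any import work happens.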