Author: Daniel Baumann <daniel@debian.org>
Description: Remove the llm Python command, as src:llm is not in main (Closes: #1120990).

diff -Naurp litecli.orig/litecli/main.py litecli/litecli/main.py
--- litecli.orig/litecli/main.py
+++ litecli/litecli/main.py
@@ -458,31 +458,6 @@ class LiteCli(object):
                     self.echo(str(e), err=True, fg="red")
                     return
 
-                while special.is_llm_command(text):
-                    try:
-                        start = time()
-                        assert self.sqlexecute is not None
-                        cur = self.sqlexecute.conn and self.sqlexecute.conn.cursor()
-                        context, sql, duration = special.handle_llm(text, cur)
-                        if context:
-                            click.echo("LLM Reponse:")
-                            click.echo(context)
-                            click.echo("---")
-                        click.echo(f"Time: {duration:.2f} seconds")
-                        assert self.prompt_app is not None
-                        text = self.prompt_app.prompt(default=sql)
-                    except KeyboardInterrupt:
-                        return
-                    except special.FinishIteration as e:
-                        if e.results:
-                            output_res(e.results, start)
-                        return
-                    except RuntimeError as e:
-                        logger.error("sql: %r, error: %r", text, e)
-                        logger.error("traceback: %r", traceback.format_exc())
-                        self.echo(str(e), err=True, fg="red")
-                        return
-
             if not text.strip():
                 return
 
diff -Naurp litecli.orig/litecli/packages/completion_engine.py litecli/litecli/packages/completion_engine.py
--- litecli.orig/litecli/packages/completion_engine.py
+++ litecli/litecli/packages/completion_engine.py
@@ -121,9 +121,6 @@ def suggest_special(text: str) -> list[d
         else:
             return [{"type": "table", "schema": []}]
 
-    if cmd in [".llm", ".ai", "\\llm", "\\ai"]:
-        return [{"type": "llm"}]
-
     return [{"type": "keyword"}, {"type": "special"}]
 
 
diff -Naurp litecli.orig/litecli/packages/special/main.py litecli/litecli/packages/special/main.py
--- litecli.orig/litecli/packages/special/main.py
+++ litecli/litecli/packages/special/main.py
@@ -176,13 +176,5 @@ def quit(*_args: Any) -> None:
     arg_type=NO_QUERY,
     case_sensitive=True,
 )
-@special_command(
-    "\\llm",
-    "\\ai",
-    "Use LLM to construct a SQL query.",
-    arg_type=NO_QUERY,
-    case_sensitive=False,
-    aliases=(".ai", ".llm"),
-)
 def stub() -> None:
     raise NotImplementedError
diff -Naurp litecli.orig/litecli/sqlcompleter.py litecli/litecli/sqlcompleter.py
--- litecli.orig/litecli/sqlcompleter.py
+++ litecli/litecli/sqlcompleter.py
@@ -11,7 +11,6 @@ from prompt_toolkit.completion.base impo
 from .packages.completion_engine import suggest_type
 from .packages.parseutils import last_word
 from .packages.special.iocommands import favoritequeries
-from .packages.special import llm
 from .packages.filepaths import parse_path, complete_path, suggest_path
 
 _logger = logging.getLogger(__name__)
@@ -539,19 +538,6 @@ class SQLCompleter(Completer):
             elif suggestion["type"] == "file_name":
                 file_names = self.find_files(word_before_cursor)
                 completions.extend(file_names)
-            elif suggestion["type"] == "llm":
-                if not word_before_cursor:
-                    tokens = document.text.split()[1:]
-                else:
-                    tokens = document.text.split()[1:-1]
-                possible_entries = llm.get_completions(tokens)
-                subcommands = self.find_matches(
-                    word_before_cursor,
-                    possible_entries,
-                    start_only=False,
-                    fuzzy=True,
-                )
-                completions.extend(subcommands)
 
         return completions
 
