"""
API arguments and calls

.. attention:: Call **handshake**: In production, we occasionally see ``W: Remote handshake failed for ...: 'tuple' object has no attribute 'signatures'``. Reproduce?
.. attention:: Call **debmirror** (external workarounds): ``apt update`` fails on ``experimental`` suites (contents not mirrored) (:debbug:`819925`)

  This happens only on systems where APT is configured to download contents, most likely just because
  ``apt-file`` is installed. So, the easiest workaround is::

    apt purge apt-file

  Closest to an actual fix is to install debmirror variant '+abfixes' from
  ``Hellfield Archive``, where the "bug" has been fixed (June 2023: at least
  available for bullseye and bookworm).

.. note:: Call **stop**: *Ongoing* ``PACKAGING`` or ``BUILDING`` are lost on restart

  Stopping/restarting an instance will make ongoing ``BUILDING`` or ``PACKAGING`` fail -- you need to :apicall:`retry`
  the whole (no-longer *ongoing*) ``PACKAGING`` then (from the repository instance).

  It's probably wise *not* to do *unattended upgrades* of the mini-buildd Debian package on heavily used production systems.

.. note:: Call **keyring-packages**: No compat for urold <= squeeze (``apt-key add``)

  Since ``2.x``, keyring packages will use ``/etc/apt/trusted.gpg.d/<foo>.gpg``, not deprecated ``apt-key add <foo>``.

  In Debian, this is supported since ``wheezy (2013)``.

  For distributions ``<= squeeze`` (apt versions ``~<= 0.8.x``), you would manually have to run ``apt-key add /etc/apt/trusted.gpg.d/<foo>.gpg`` after installation of the keyring package.

.. note:: Call **keyring-packages**: squeeze or older only: apt secure problems after initial (unauthorized) install of the archive-key package (:debbug:`657561`)

  - aptitude always shows <NULL> archive

  You can verify this problem via::

    # aptitude -v show YOURID-archive-keyring | grep ^Archive
    Archive: <NULL>, now

  - BADSIG when verifying the archive keyring package's signature

  Both might be variants of Debian bug above (known to occur for <= squeeze). For both, check if this::

    # rm -rf /var/lib/apt/lists/*
    # apt-get update

  fixes it.
"""

import abc
import argparse
import collections
import copy
import enum
import glob
import hashlib
import inspect
import json
import logging
import os
import pathlib
import re
import shlex
import sys
import textwrap
import threading
from contextlib import closing

from mini_buildd import call, changes, config, daemon, dist, events, files, net, package, pool, util, values

LOG = logging.getLogger(__name__)


class Argument():
    """
    Generic Argument Class

    ``value()`` always provides a non-``None`` value of the specific
    type, either the ``default`` or a ``given`` value.

    ``strvalue()`` always provides a non-``None`` ``str`` value.

    The ``default`` value is given in the constructor. For
    server-specific defaults, this may be function -- then the default
    value will be computed only at run time on the server.

    A ``given`` value can be provided via special ``set()`` method:
      * Empty ``str``, ``list`` or false ``bool`` will yield ``None``.
      * Non-empty ``str`` will be converted to the specific type.
      * Other given values will be used as is.

    ============  ============ ============ =========== =================
    Type          value() type svalue() ex. HTML GET    argparse
    ============  ============ ============ =========== =================
    Str           str          "string"     key=string  --key "string"
    Url           str          "http://.."  key=string  --key "string"
    MultilineStr  str          "long"       key=string  --key "string"
    Choice        str          "c0"         key=string  --key "string"
    Int           int          "17"         key=string  --key "int"
    Bool          bool         "True"       key=True    --key
    List          list of str  "v0,v1,.."   key=v0,v1.. --key "v0" "v1"..
    ============  ============ ============ =========== =================
    """

    #: HTML form input type (set by subclasses; see table in class docstring).
    HTML_TYPE = None
    #: Human-readable format description, used in usage() (set by subclasses).
    HTML_SYNTAX = None

    #: Validate that values are actually of that type
    VALUE_TYPE = str

    #: Magic string value to use as value when a default callable on the server should be used.
    SERVER_DEFAULT = "<server_default>"

    def __init__(self, id_list, doc="Undocumented", default=None, choices=None, header=None):
        """
        :param id_list: List like '['--with-rollbacks', '-R']' for option or '['distributions']' for positionals; 1st entry always denotes the id
        :param doc: Human-readable documentation for this argument.
        :param default: Default value, or a callable computing it on the server at run time.
        :param choices: List of valid values, or a callable computing them on the server at run time.
        :param header: Opaque grouping hint; stored as-is (not interpreted by this class).

        >>> Argument(["--long-id", "-s"]).identity
        'long_id'
        >>> Argument(["posi-tional"]).identity
        'posi_tional'
        """
        # Identifiers && doc
        self.id_list = id_list
        self.doc = doc
        self.is_positional = not id_list[0].startswith("--")
        # identity: 1st of id_list with leading '--' removed and hyphens turned to snake case
        self.identity = util.Snake(id_list[0][0 if self.is_positional else 2:]).from_kebab()

        # Values
        self._default = default
        self.given = None

        # Choices helper
        self._choices = choices
        self.header = header

    def __str__(self):
        """Be sure not to use value() here"""
        return f"{self.identity}: given={self.given}, _default={self._default}"

    @classmethod
    def s2v(cls, str_value):
        """Convert string to value"""
        return cls.VALUE_TYPE(str_value) if str_value else None

    @classmethod
    def v2s(cls, value):
        """Convert value to string"""
        return "" if value is None else str(value)

    def required(self):
        """An argument without a default is required."""
        return self._default is None

    def needs_value(self):
        """If user input is no_value"""
        return self.given is None and self.required()

    def _get_value(self, value):
        # Callables are server-side computed values: evaluate them only at run time.
        return value() if callable(value) else value

    def choices(self):
        """Current choices (empty list if no choices were configured)."""
        return [] if self._choices is None else self._get_value(self._choices)

    def default(self):
        """Current default value (computed now if the default is a callable)."""
        return self._get_value(self._default)

    def strdefault(self):
        """``str`` variant of default()."""
        return self.v2s(self.default())

    def value(self):
        """Non-``None`` value of the specific type: ``given`` if set, else the default.

        :raises util.HTTPBadRequest: if the argument is required but no value was given.
        """
        if self.needs_value():
            raise util.HTTPBadRequest(f"Missing required argument: {self}")
        return self.default() if self.given is None else self.given

    def strvalue(self):
        """``str`` variant of value()."""
        return self.v2s(self.value())

    def strgiven(self):
        """``str`` variant of the given value."""
        return self.v2s(self.given)

    def icommand_line_given(self):
        """Iterate command line token(s) representing the given value."""
        yield self.strgiven()

    def set(self, given):
        """Set the ``given`` value (see class docstring for the conversion rules).

        Note the branch order matters: SERVER_DEFAULT first, then str
        conversion, then list/bool emptiness handling, then exact type match.

        :raises util.HTTPBadRequest: if the given value has an unsupported type.
        """
        if given == self.SERVER_DEFAULT:
            self.given = None
        elif isinstance(given, str):
            self.given = self.s2v(given)
        elif isinstance(given, (list, bool)):
            self.given = given if given else None
        elif isinstance(given, self.VALUE_TYPE):
            self.given = given
        else:
            raise util.HTTPBadRequest(f"API argument '{self.identity}': Invalid value {given} (type given {type(given)}, needs {self.VALUE_TYPE})")

    def argparse_kvsargs(self):
        """Python 'argparse' support"""
        kvsargs = {"help": self.doc}
        if self._default is not None:
            if callable(self._default):
                # Client side cannot compute server defaults: use the magic marker,
                # which set() resolves back to "use the server's default".
                kvsargs["default"] = self.SERVER_DEFAULT
                kvsargs["help"] += f"\nServer Default: {self._default.__name__}"
            else:
                kvsargs["default"] = self._default
        return kvsargs

    def usage(self):
        """One-paragraph usage text for this argument."""
        return f"{self.identity}: {self.doc}\n\nFormat: {self.HTML_SYNTAX}"


class StrArgument(Argument):
    """Plain string argument (also the base for most specialized arguments)."""

    HTML_TYPE = "text"
    HTML_SYNTAX = "Plain string"

    def argparse_kvsargs(self):
        """Add the plain ``store`` action on top of the generic kwargs."""
        kvsargs = super().argparse_kvsargs()
        kvsargs["action"] = "store"
        return kvsargs


class UrlArgument(StrArgument):
    """String argument rendered as an URL input in HTML."""

    HTML_TYPE = "url"
    HTML_SYNTAX = "URL string"


class MultilineStrArgument(StrArgument):
    """String argument rendered as a multiline textarea in HTML."""

    HTML_TYPE = "textarea"
    HTML_SYNTAX = "Multiline string"


class ChoiceArgument(Argument):
    """Argument whose value must be one of a fixed or server-computed set of choices."""

    HTML_TYPE = "select"
    HTML_SYNTAX = "Choice"

    def value(self):
        """Like ``Argument.value()``, but additionally validated against ``choices()``.

        :raises util.HTTPBadRequest: if the value is not a valid choice.
        """
        value = super().value()
        choices = self.choices()
        if value not in choices:
            raise util.HTTPBadRequest(f"{self.identity}: Wrong choice argument: '{value}' not in {choices}")
        return value

    def argparse_kvsargs(self):
        kvsargs = super().argparse_kvsargs()
        # Only pass static choices to argparse: a callable (server-computed) choices
        # provider cannot be evaluated on the client side, and argparse would try to
        # test membership against the function object. Same guard as in
        # ListArgument.argparse_kvsargs(); server-side validation in value() always applies.
        if self._choices is not None and not callable(self._choices):
            kvsargs["choices"] = self._choices
        if self.is_positional and self._default is not None:
            kvsargs["nargs"] = "?"  # Allow positional args to be actually optional, if we have a default.
        return kvsargs


class IntArgument(StrArgument):
    """Integer argument."""

    VALUE_TYPE = int
    HTML_TYPE = "number"
    HTML_SYNTAX = "Integer number"

    def argparse_kvsargs(self):
        """Let argparse convert command line strings to ``int``."""
        kvsargs = super().argparse_kvsargs()
        kvsargs["type"] = int
        return kvsargs


class BoolArgument(ChoiceArgument):
    """Boolean argument: a checkbox in HTML, a plain flag (``--with-foo``) on the command line."""

    VALUE_TYPE = bool
    HTML_TYPE = "checkbox"
    HTML_SYNTAX = "Checkbox"

    def __init__(self, *args, **kwargs):
        # Bools are always optional with fixed choices and a False default.
        super().__init__(*args, choices=[True, False], default=False, **kwargs)

    @classmethod
    def s2v(cls, str_value):
        """Only the literal string ``'True'`` converts to ``True``."""
        return str_value == str(True)

    def icommand_line_given(self):
        """Empty generator -- bools are just command line options like ``--with-foo``"""
        return
        yield  # pylint: disable=unreachable  # makes this a generator function

    def argparse_kvsargs(self):
        # Deliberately bypass ChoiceArgument: a bool is a simple store_true flag.
        return {**Argument.argparse_kvsargs(self), "action": "store_true"}


class _ExtendAction(argparse._AppendAction):  # pylint: disable=protected-access
    """
    Customized argparse ``extend`` action from ``python 3.8``

    * Allows ``-L a -L b -L c`` as well as ``-L a b c`` to be combined to one flat list ``a b c``.
    * 'Fixes' argparse's behavior to add given args to the default value, see: https://github.com/python/cpython/issues/60603.
    * Does not interfere with special default value Argument.SERVER_DEFAULT
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # True until the option is seen for the first time; the default is then dropped.
        self.reset_default = True

    # __call___ is run for each ``--arg`` given on command line
    def __call__(self, parser, namespace, _values, option_string=None):
        if self.reset_default:
            # First occurrence: start from scratch instead of extending the default.
            current = []
            self.reset_default = False
        else:
            current = getattr(namespace, self.dest, None)

        current = argparse._copy_items(current)
        current.extend(_values)
        setattr(namespace, self.dest, current)


class ListArgument(StrArgument):
    """List-of-strings argument: comma-separated in HTTP, repeated tokens on the command line."""

    VALUE_TYPE = list
    SEPARATOR = ","
    HTML_SYNTAX = "List as comma-separated strings"

    @classmethod
    def s2v(cls, str_value):
        """Split a non-empty string on SEPARATOR; an empty string yields ``None``."""
        if not str_value:
            return None
        return str_value.split(cls.SEPARATOR)

    @classmethod
    def v2s(cls, value):
        """Join list items with SEPARATOR; ``None`` yields the empty string."""
        return cls.SEPARATOR.join(value) if value is not None else ""

    def icommand_line_given(self):
        """Each list item becomes its own command line token."""
        yield from (self.given if self.given is not None else [])

    def argparse_kvsargs(self):
        kvsargs = super().argparse_kvsargs()
        kvsargs.update(nargs="+", action=_ExtendAction, metavar=util.singularize(self.identity.upper()))
        # Static choices only -- a callable choices provider is evaluated server-side.
        if self._choices is not None and not callable(self._choices):
            kvsargs["choices"] = self._choices
        return kvsargs


#: Specialized argument classes
class Repository(StrArgument):
    """Single repository identity argument."""

    def __init__(self, id_list, **kwargs):
        super().__init__(
            id_list,
            choices=values.all_repositories,
            doc="Repository identity (like 'test', the default sandbox repo identity)",
            **kwargs,
        )

    def object(self):
        """Resolve the value to the actual Repository model instance."""
        return util.models().get_repository(self.value())


class Repositories(ListArgument):
    """List-of-repository-identities argument."""

    def __init__(self, id_list, **kwargs):
        super().__init__(id_list, choices=values.all_repositories, doc="Repository identities (like 'test', the default sandbox repo identity)", **kwargs)

    def objects(self):
        """Resolve the values to a queryset of Repository model instances."""
        return util.models().Repository.objects.filter(identity__in=self.value())


class Codenames(ListArgument):
    """List-of-codenames argument."""

    def __init__(self, id_list, **kwargs):
        super().__init__(id_list, choices=values.all_codenames, doc="Codenames (like 'buster', 'bullseye')", **kwargs)


def diststr2repository(diststr):
    """Resolve a distribution string to its Repository model instance."""
    repository_identity = dist.Dist(diststr).repository
    return util.models().get_repository(repository_identity)


class Distribution(StrArgument):
    """Single distribution name argument."""

    def __init__(self, id_list, choices=values.active_distributions, extra_doc="", **kwargs):
        doc = f"Distribution name (<codename>-<id>-<suite>[-rollback<n>]){extra_doc}"
        super().__init__(id_list, choices=choices, doc=doc, **kwargs)

    def dist(self):
        """Parsed ``dist.Dist`` object for this argument's value."""
        return dist.Dist(self.value())


class Distributions(ListArgument):
    """List-of-distribution-names argument."""

    def __init__(self, id_list, choices=values.active_distributions, **kwargs):
        super().__init__(id_list, doc="Distribution names", choices=choices, **kwargs)


class Source(StrArgument):
    """Source package name argument."""

    def __init__(self, id_list, extra_doc="", **kwargs):
        super().__init__(id_list, choices=values.last_sources, doc=f"Source package name{extra_doc}", **kwargs)


class UploadOptions(StrArgument):
    """Upload options argument (see ``changes.Upload.Options``)."""

    def __init__(self, id_list, **kwargs):
        doc = "List of upload options, separated by '|':\n\n" + textwrap.indent(changes.Upload.Options.usage(), "  ")
        super().__init__(id_list, default="lintian-mode=ignore", doc=doc, **kwargs)


class Output(ChoiceArgument):
    """Meta API call option 'output'"""

    def __init__(self):
        doc = (
            "Desired result format\n"
            "\n"
            "html: Return result as HTML (popup with JS, else full HTML page)\n"
            "plain: Plain result\n"
            "json: Return raw json result\n"
        )
        super().__init__(["output"], default="html", choices=["html", "plain", "json"], doc=doc)


class Rollbacks(IntArgument):
    """Rollback count argument."""

    def __init__(self, id_list, extra_doc="", **kwargs):
        super().__init__(id_list, default=0, doc=f"Select (maximal) <n> rollback sources (``0``: no rollbacks, ``< 0``: all rollbacks){extra_doc}", **kwargs)

    def range(self, suite):
        """Get valid range for this suite"""
        requested = self.value()
        # Negative means "all"; in any case the suite's configured rollback count is the cap.
        upper = suite.rollback if requested < 0 else min(requested, suite.rollback)
        return range(0, upper)


class Call():
    AUTH = config.AUTH_NONE
    NEEDS_RUNNING_DAEMON = False
    CONFIRM = False
    RESULT_DESC = ""
    CATEGORY = "Consumer"

    @classmethod
    def name(cls):
        return util.Snake(cls.__name__).from_camel()

    def __str__(self):
        return f"API call '{self.name()}'"

    @classmethod
    def doc(cls):
        return (
            f"{inspect.getdoc(cls)}\n"
            f"{f'\nJSON result description:\n\n{textwrap.indent(cls.RESULT_DESC, "  ")}\n' if cls.RESULT_DESC else ''}"
            "\n"
            f"{'This is a MAINTENANCE call: Daemon will be automatically STOPPED while running, any ongoing events will be CANCELED.\n\n' if issubclass(cls, _Maintenance) else ''}"
            f"Authorization: {cls.AUTH}"
        )

    @classmethod
    def doc_title(cls):
        return cls.doc().partition("\n")[0]

    @classmethod
    def doc_body(cls):
        return cls.doc().partition("\n")[2][1:]

    @classmethod
    def uri(cls):
        return config.URIS["api"]["view"].join(cls.name() + "/")

    @classmethod
    def iarguments(cls):
        yield from []

    @classmethod
    def from_sloppy_args(cls, **kwargs):
        """Construct ignoring any unknown arguments given"""
        keys = [a.identity for a in cls.iarguments()]
        return cls(**{key: value for key, value in kwargs.items() if key in keys})

    def set_args(self, **kwargs):
        for key, value in kwargs.items():
            if key not in self.args:
                raise util.HTTPBadRequest(f"API call '{self.name()}': Unknown argument '{key}'")
            self.args[key].set(value)

        for key in (key for key in self.args if key in kwargs):
            self.args[key].set(kwargs.get(key))

    def __init__(self, **kwargs):
        self.args = {arg.identity: arg for arg in self.iarguments()}
        self.set_args(**kwargs)
        self.request = None
        self.result = {}  # json result

    @classmethod
    def from_command_line(cls, command_line):
        return cls(**cls.parse_command_line(command_line))

    def set_request(self, request):
        self.request = request

    @classmethod
    def get_plain(cls, result, force_json=False):
        """Get ``str`` result (non-``str`` results get json pretty-formatted)"""
        return util.json_pretty(result) if (force_json or not isinstance(result, str)) else result

    def plain(self, force_json=False):
        return self.get_plain(self.result, force_json=force_json)

    def json_pretty(self):
        """For (arg-less) use in templates only"""
        return self.plain(force_json=True)

    @classmethod
    def parse_command_line(cls, command_line):
        class RaisingArgumentParser(argparse.ArgumentParser):
            """Make argparse raise only (not exit) on error. See https://bugs.python.org/issue41255"""

            def error(self, message):
                raise util.HTTPBadRequest(message)

        parser = RaisingArgumentParser()
        for argument in cls.iarguments():
            parser.add_argument(*argument.id_list, **argument.argparse_kvsargs())
        return vars(parser.parse_args(args=shlex.split(command_line)))

    def icommand_line(self, full=False, with_user=False, user=None, exclude=None):
        _exclude = [] if exclude is None else exclude

        if full:
            yield "mini-buildd-api"
            yield self.name()
            yield util.http_endpoint().geturl(with_user=with_user, user=user)

        for arg in (a for a in self.args.values() if a.identity not in _exclude and a.given is not None):
            if arg.is_positional:
                yield from arg.icommand_line_given()
            else:
                yield arg.id_list[0]
                yield from arg.icommand_line_given()

    def command_line(self, full=False, with_user=False, user=None, exclude=None):
        return shlex.join(self.icommand_line(full=full, with_user=with_user, user=user, exclude=exclude))

    def command_line_full(self):
        user = self.request.user if self.AUTH is not config.AUTH_NONE and self.request is not None and self.request.user.is_authenticated else None
        return self.command_line(full=True, with_user=user is not None, user=user)

    def http_args(self, with_confirm=False, with_output=None):
        http_args = {}
        for arg in self.args.values():
            if arg.given is not None:
                http_args[arg.identity] = arg.strgiven()

        if with_confirm:
            http_args["confirm"] = self.name()
        if with_output is not None:
            http_args["output"] = with_output

        return http_args

    def url(self, endpoint=None, with_confirm=False, with_output="html", relative=False):
        if endpoint is None:
            endpoint = util.http_endpoint()
        return endpoint.geturl(path=config.URIS["api"]["view"].join(self.name() + "/"), query=self.http_args(with_confirm=with_confirm, with_output=with_output), relative=relative)

    # For templates only
    def url_relative(self):
        return self.url(relative=True)

    @abc.abstractmethod
    def _run(self):
        pass

    def run(self):
        self._run()


#: Option shortcuts (use as mixin)
class _Admin:
    """Mixin: API call requires 'admin' authorization."""

    AUTH = config.AUTH_ADMIN


class _Staff:
    """Mixin: API call requires 'staff' authorization."""

    AUTH = config.AUTH_STAFF


class _Login:
    """Mixin: API call requires any authenticated login."""

    AUTH = config.AUTH_LOGIN


class _Running:
    """Mixin: API call needs the daemon engine to be running."""

    NEEDS_RUNNING_DAEMON = True


class _Confirm:
    """Mixin: frontends should ask for confirmation before running this call."""

    CONFIRM = True


#: Serializes maintenance API calls: at most one may run at a time (see ``_Maintenance.run``).
MAINTENANCE_LOCK = threading.Lock()


class _Maintenance:
    """Mixin: run ``_run()`` exclusively (via MAINTENANCE_LOCK) with the daemon stopped."""

    def run(self):
        # Non-blocking acquire: concurrent maintenance calls fail fast instead of queueing.
        if not MAINTENANCE_LOCK.acquire(blocking=False):  # pylint: disable=consider-using-with
            raise util.HTTPUnavailable("Another maintenance API call currently running")
        try:
            with daemon.Stopped():
                self._run()
        finally:
            MAINTENANCE_LOCK.release()


#: Category options (mixins). For structuring documentation only (API index).
class _CatConsumer:
    """Mixin: document API call in the 'Consumer' category."""

    CATEGORY = "Consumer"


class _CatDeveloper:
    """Mixin: document API call in the 'Developer' category."""

    CATEGORY = "Developer"


class _CatAdministrator:
    """Mixin: document API call in the 'Administrator' category."""

    CATEGORY = "Administrator"


CATEGORIES = [_CatConsumer.CATEGORY, _CatDeveloper.CATEGORY, _CatAdministrator.CATEGORY]


#: API calls
class Status(Call):
    """
    Get status of this instance
    """

    # Typo fixed: "(0 =< load <= 1)" -> "load (0 <= load <= 1)".
    RESULT_DESC = """\
{
  "version": mini-buildd's version
  "identity": Instance identity
  "url": Instance URL (HTTP)
  "incoming_url": Incoming URL (currently FTP)
  "load": Instance's load (0 <= load <= 1). If negative, the instance is stopped
  "chroots": List of active chroots
  "remotes": Active or auto-reactivatable remotes
  ["repositories": Simplified structural representation of all repositories]
}
"""

    @classmethod
    def iarguments(cls):
        yield BoolArgument(["--with-repositories", "-R"], doc="Also list all repositories (may be longish)")

    def _run(self):
        self.result = {
            "version": util.__version__,
            "identity": daemon.get_model().identity,
            "url": util.http_endpoint().geturl(),
            "incoming_url": daemon.get_model().mbd_get_ftp_endpoint().geturl(),
            # Negative load signals "stopped" (see RESULT_DESC).
            "load": daemon.get().builder.load() if daemon.get().is_alive() else -1.0,
            "chroots": [c.mbd_key() for c in util.models().Chroot.mbd_get_active()],
            "remotes": [r.mbd_url() for r in util.models().Remote.mbd_get_active_or_auto_reactivate()],
        }
        if self.args["with_repositories"].value():
            self.result["repositories"] = {r.identity: r.mbd_json() for r in util.models().Repository.objects.all()}


class PubKey(Call):
    """
    Get ASCII-armored GnuPG public key of this instance

    Used to sign the APT repositories ('APT key') and for authorization across instances.

    See ``Customer's Manual, APT Bootstrap`` for ways to employ the APT key on your local system.
    """

    def _run(self):
        self.result = daemon.get().gnupg.pub_key


class DputConf(Call):
    """
    Get recommended dput config snippet

    Usually, this is for integration in your personal ``~/.dput.cf`` (which will work
    for both, ``dput`` and ``dput-ng``).

    Administrators may add custom site targets via ``~mini-buildd/etc/dput.cf``.
    """

    @classmethod
    def iarguments(cls):
        yield BoolArgument(["--without-global"], doc="Exclude recommended global config")
        yield BoolArgument(["--dput-ng-profile", "-N"], doc="Output dput-ng (json) profile (instead of traditional dput.cf config snippet) only")

    def _run(self):
        self.result = ""  # Make sure result is ``str``

        # dput-ng profile requested: that's the whole result.
        if self.args["dput_ng_profile"].value():
            self.result = daemon.get_model().mbd_get_dputng_profile()
            return

        if not self.args["without_global"].value():
            self.result = (
                "# Recommended global config\n"
                "[DEFAULT]\n"
                "# Avoid accidental uploads to default target (like 'ftp-master' for Debian)\n"
                "default_host_main = _please_specify_target_explicitly_\n"
                "\n"
            )

        self.result += (
            "# This instance's native target\n"
            f"{daemon.get_model().mbd_get_dput_conf()}"
            "\n"
        )

        # Optionally append administrator-provided site targets.
        site_config_path = config.ROUTES["etc"].path.join("dput.cf")
        if os.path.exists(site_config_path):
            with util.fopen(site_config_path) as site_config:
                self.result += "# Custom site targets\n" + site_config.read()


class SourcesList(Call):
    """
    Get sources.list (apt lines)

    Usually, this output is put to a file like ``/etc/apt/sources.list.d/mini-buildd-xyz.list``.
    """

    @classmethod
    def iarguments(cls):
        yield Codenames(["--codenames", "-C"], default=values.all_codenames)
        yield Repositories(["--repositories", "-R"], default=values.all_repositories)
        yield ListArgument(["--suites", "-S"], default=values.all_suites, choices=values.all_suites, doc="Suite names (like 'stable', 'unstable')")
        yield ListArgument(["--types", "-T"], default=["deb"], choices=["deb", "deb-src"], doc="Types of apt lines ('deb', 'deb-src')")
        yield StrArgument(["--options", "-O"], default="", doc="Apt line options ('deb[-src] [<options>] ...'). See 'man 5 source.list'")
        yield Rollbacks(["--rollbacks", "-r"])
        yield StrArgument(["--snapshot", "-P"], default="", doc="Select a repository snapshot (see ``snapshot_ls``)")
        yield StrArgument(["--mirror", "-M"], default="", doc="URL of a mirror to use (instead of mini-buildd's native URL)")
        yield BoolArgument(["--with-extra", "-X"], doc="Also list extra sources (non-base sources used for building)")
        yield BoolArgument(["--with-comment", "-D"], doc="Add comment line above apt line")

    def _run(self):
        sources_list = files.SourcesList()
        snapshot = self.args["snapshot"].value()
        with_extra = self.args["with_extra"].value()

        for repository in self.args["repositories"].objects():
            for distribution in repository.distributions.all().filter(base_source__codename__in=self.args["codenames"].value()):
                if with_extra:
                    for extra in distribution.extra_sources.all():
                        sources_list.append(extra.source.mbd_get_apt_line())

                for suite in repository.layout.suiteoption_set.filter(suite__name__in=self.args["suites"].value()):
                    sources_list.append(repository.mbd_get_apt_line(distribution, suite, snapshot=snapshot))
                    for rollback in self.args["rollbacks"].range(suite):
                        sources_list.append(repository.mbd_get_apt_line(distribution, suite, snapshot=snapshot, rollback=rollback))

        self.result = sources_list.get(
            self.args["types"].value(),
            mirror=self.args["mirror"].value(),
            options=self.args["options"].value(),
            with_comment=self.args["with_comment"].value(),
        )


class Ls(Call):
    """
    List source package in repository
    """

    @classmethod
    def iarguments(cls):
        yield Source(["source"])
        yield Repositories(["--repositories", "-R"], default=values.all_repositories)
        yield Codenames(["--codenames", "-C"], default=values.all_codenames)
        yield StrArgument(["--version", "-V"], default="", doc="Limit to exactly this version")
        yield StrArgument(["--min-version", "-M"], default="", doc="Limit to this version or greater")

    def _run(self):
        source = self.args["source"].value()
        for repository in self.args["repositories"].objects():
            listing = repository.mbd_reprepro.ls(
                source,
                codenames=self.args["codenames"].value(),
                version=self.args["version"].value(),
                min_version=self.args["min_version"].value(),
            )
            # Only repositories with actual matches appear in the result.
            if listing:
                self.result[repository.identity] = listing


class Show(Ls):
    """
    Show source package in repository
    """

    def _run(self):
        # Run the plain listing first, then enrich each per-repository result.
        super()._run()
        for listing in self.result.values():
            listing.enrich()


class Search(Call):
    """
    Search for source or binary package names in ``repositories``, ``builds`` and ``events``

    Be cautious with binary package files found outside of ``repositories``:

    * Matches only found in ``builds`` usually failed installation as some criteria is not met.
    * Matches found in ``events`` originate from the initial (non source-only) user upload (and thus are local builds).
    """

    #: Per-source result skeleton (deep-copied for each new source found).
    EMPTY_RESULT = {
        "repositories": [],
        "builds": [],
        "events": [],
    }

    #: Human-readable interpretation of each result category.
    MSG = {
        "repositories": "In repository, safe to use",
        "builds": "Binary packages maybe not installed/some criteria not met",
        "events": "Be cautious: Binary package originates from user upload (local build)",
    }

    @classmethod
    def iarguments(cls):
        yield StrArgument(["pattern"],
                          doc=(
                              "Pattern for binary or source package name.\n"
                              "\n"
                              "Value is a glob pattern; '*' is added automatically to begin and end of the pattern."
                          ))

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.pattern = None  # computed in _run()

    def _collect(self, category, route, isearch):
        """Fold ``(src, file_name)`` matches into ``self.result`` under ``category``.

        File names are stored relative to the route's base path.
        """
        for src, file_name in isearch:
            self.result.setdefault(src, copy.deepcopy(self.EMPTY_RESULT))
            self.result[src][category].append(str(file_name.relative_to(route.path.full)))

    def _run(self):
        self.pattern = "*" + self.args["pattern"].value().lower() + "*"

        # Search in repositories (per-repository package pools)
        repositories_route = config.ROUTES["repositories"]
        for repository in util.models().Repository.objects.all():
            self._collect("repositories", repositories_route, repository.mbd_reprepro.pool.isearch(self.pattern))

        # Search in builds and events (flat pool directories)
        for category in ("builds", "events"):
            route = config.ROUTES[category]
            pool_dir = pool.Dir(pathlib.Path(route.path.full), pooldir="", sources_glob="*")
            self._collect(category, route, pool_dir.isearch(self.pattern))


MULT_VERSIONS_PER_DIST_NOTE = "Use as safeguard, or for rare cases of multiple version of the same package in one distribution (in different components)"


class Debdiff(Call):
    """
    Compare two internal source packages
    """

    @classmethod
    def iarguments(cls):
        yield Source(["source"])
        yield Repository(["repository"])
        yield StrArgument(["--versiona", "-a"], default="", doc="Version a to compare")
        yield StrArgument(["--versionb", "-b"], default="", doc="Version b to compare")

    def _run(self):
        source = self.args["source"].value()
        repository = self.args["repository"].object()
        repositories_path = config.ROUTES["repositories"].path
        # Locate both .dsc files in the repository pool, then let debdiff compare them.
        dsc_a = repository.mbd_reprepro.pool.dsc_path(source, self.args["versiona"].value())
        dsc_b = repository.mbd_reprepro.pool.dsc_path(source, self.args["versionb"].value())
        self.result = call.Call(["debdiff", repositories_path.join(dsc_a), repositories_path.join(dsc_b)]).stdout


class Migrate(_Staff, _Confirm, _CatDeveloper, Call):
    """
    Migrate source package

    Migrates a source package along with all its binary packages. If
    run for a rollback distribution, this will perform a rollback
    restore.
    """

    @classmethod
    def iarguments(cls):
        yield Source(["source"])
        yield Distribution(["distribution"], choices=values.migratable_distributions, extra_doc="\n\nMigrates *from* this distribution")
        yield BoolArgument(["--full", "-F"], doc="Migrate all 'migrates_to' suites up (f.e. unstable->testing->stable)")
        yield StrArgument(["--version", "-V"], default="", doc=f"Migrate exactly this version. {MULT_VERSIONS_PER_DIST_NOTE}")

    def _run(self):
        dist_obj = self.args["distribution"].dist()
        repository, distribution, suite = util.models().parse_dist(dist_obj)
        version = self.args["version"].value()
        repository.mbd_package_migrate(
            self.args["source"].value(),
            distribution,
            suite,
            full=self.args["full"].value(),
            rollback=dist_obj.rollback_no,
            version=version or None,  # empty string means "any version"
        )


class Remove(_Admin, _Confirm, _CatAdministrator, Call):
    """
    Remove source package

    Removes a source package along with all its binary packages.
    """

    @classmethod
    def iarguments(cls):
        yield Source(["source"])
        yield Distribution(["distribution"])
        yield BoolArgument(["--without-rollback"], doc="Don't copy to rollback distribution")
        yield StrArgument(["--version", "-V"], default="", doc=f"Remove exactly this version. {MULT_VERSIONS_PER_DIST_NOTE}")

    def _run(self):
        dist_arg = self.args["distribution"].dist()
        repository, distribution, suite = util.models().parse_dist(dist_arg)
        # An empty version string means "no specific version requested".
        requested_version = self.args["version"].value() or None
        repository.mbd_package_remove(
            self.args["source"].value(),
            distribution,
            suite,
            rollback=dist_arg.rollback_no,
            version=requested_version,
            without_rollback=self.args["without_rollback"].value(),
        )


class Port(_Staff, _Running, _Confirm, _CatDeveloper, Call):
    """
    Port internal source package

    An internal 'port' is a no-changes (i.e., only the changelog will be adapted)
    rebuild of the given locally-installed package.

    When ``from_distribution`` equals ``to_distribution``, a rebuild will be done.
    """

    @classmethod
    def iarguments(cls):
        yield Source(["source"])
        yield Distribution(["from_distribution"])
        yield Distributions(["to_distributions"], choices=values.active_uploadable_distributions)
        yield StrArgument(["--version", "-V"], default="", doc=f"Port exactly this version. {MULT_VERSIONS_PER_DIST_NOTE}")
        yield UploadOptions(["--options", "-O"])

    def _run(self):
        self.result = []
        # Hoist loop-invariant argument values; empty version string means "any".
        source = self.args["source"].value()
        from_diststr = self.args["from_distribution"].value()
        requested_version = self.args["version"].value() or None
        options = self.args["options"].value().split("|")
        # set(): port each target distribution at most once.
        for to_diststr in set(self.args["to_distributions"].value()):
            self.result.append(package.port(source, from_diststr, to_diststr, version=requested_version, options=options))


class PortExt(_Staff, _Running, _Confirm, _CatDeveloper, Call):
    """
    Port external source package

    An external 'port' is a no-changes (i.e., only the changelog will be adapted)
    rebuild of any given source package.
    """

    @classmethod
    def iarguments(cls):
        yield UrlArgument(["dsc"], doc="URL of any Debian source package (dsc) to port")
        yield BoolArgument(["--allow-unauthenticated", "-u"], doc="Don't verify downloaded DSC against Debian keyrings (see ``man dget``)")
        yield Distributions(["distributions"], choices=values.active_uploadable_distributions)
        yield UploadOptions(["--options", "-O"])

    def _run(self):
        self.result = []
        # Hoist loop-invariant argument values.
        dsc_url = self.args["dsc"].value()
        options = self.args["options"].value().split("|")
        allow_unauthenticated = self.args["allow_unauthenticated"].value()
        # set(): port each target distribution at most once.
        for diststr in set(self.args["distributions"].value()):
            self.result.append(package.port_ext(dsc_url, diststr, options=options, allow_unauthenticated=allow_unauthenticated))


class Retry(_Staff, _Running, _Confirm, _CatDeveloper, Call):
    """
    Retry a previously failed source package
    """

    RESULT_DESC = "Changes file name that has been re-uploaded"
    BKEY_FORMAT = "<source>/<version>/<timecode>/source[ <arch>]"
    BKEY_REGEX = re.compile(r"[^/]+/[^/]+/[^/]+/[^/]+")

    @classmethod
    def iarguments(cls):
        yield StrArgument(["bkey"], choices=values.last_failed_bkeys, doc=f"Package bkey (``{cls.BKEY_FORMAT}``) of a past packaging try (see ``extra.bkey`` of a failed PACKAGING event)")

    def _run(self):
        bkey = self.args["bkey"].value()
        # Strict validation: fullmatch() (match() only anchors the prefix, so
        # extra "/..." segments would slip through), and explicitly reject
        # '.'/'..' segments so a crafted bkey cannot escape the events directory.
        if not self.BKEY_REGEX.fullmatch(bkey) or any(segment in (".", "..") for segment in bkey.split("/")):
            raise util.HTTPBadRequest(f"Wrong bkey format. Should be '{self.BKEY_FORMAT}'")

        events_path = config.ROUTES["events"].path.new_sub([bkey])
        if not os.path.exists(events_path.full):
            raise util.HTTPBadRequest(f"No such event path: {bkey}")

        # Check that there is exactly one failed/rejected event in the given path
        failed_events = glob.glob(events_path.join("*_FAILED.json")) + glob.glob(events_path.join("*_REJECTED.json"))
        if len(failed_events) != 1:
            raise util.HTTPBadRequest(f"{len(failed_events)} failed events found (we need exactly 1). Check your bkey")
        failed_event = events.Event.load(failed_events[0])
        LOG.debug("Retry: Found valid FAILED event: %s", failed_event)

        # Find all changes in that path (exactly one expected)
        changes_files = glob.glob(events_path.join("*.changes"))
        if len(changes_files) != 1:
            raise util.HTTPBadRequest(f"{len(changes_files)} changes found (we need exactly 1). Check your bkey")
        changes_filename = changes_files[0]
        LOG.debug("Retry: Found solitaire changes file: %s", changes_filename)

        # Should be safe now to re-upload the changes file found in path
        changes.Base(changes_filename).upload(daemon.get_model().mbd_get_ftp_endpoint(), force=True)
        self.result = os.path.basename(changes_filename)


class Cancel(_Staff, _Running, _Confirm, _CatDeveloper, Call):
    """Cancel an ongoing package build"""

    @classmethod
    def iarguments(cls):
        yield StrArgument(["bkey"], choices=values.current_builds, doc="Build key (``<source>/<version>/<timecode>/<arch>`` of ongoing builds; see ``extra.bkey`` in BUILDING events)")

    def _run(self):
        # Cancel the build and report the canceled build key back as result.
        bkey = self.args["bkey"].value()
        daemon.get().builder.cancel(bkey, f"{self.request.user}")
        self.result = bkey


class SetUserKey(_Login, _Confirm, _CatDeveloper, Call):
    """
    Set a user's GnuPG public key
    """

    @classmethod
    def iarguments(cls):
        yield MultilineStrArgument(["key"], doc="GnuPG public key; multiline inputs will be handled as ascii armored full key, one-liners as key ids")

    def _run(self):
        uploader = self.request.user.uploader
        uploader.Admin.mbd_remove(uploader)
        key = self.args["key"].value()

        # Multiline input is a full ascii-armored key; a one-liner is a key ID.
        is_full_key = "\n" in key
        if is_full_key:
            LOG.debug("Using given key argument as full ascii-armored GPG key")
        else:
            LOG.debug("Using given key argument as key ID")
        uploader.key = key if is_full_key else ""
        uploader.key_id = "" if is_full_key else key

        uploader.Admin.mbd_prepare(uploader)
        uploader.Admin.mbd_check(uploader)
        LOG.warning("Uploader profile changed: %s (must be (re-)activated by mini-buildd staff before you can actually use it)", uploader)


class Subscribe(_Login, Call):
    """
    Subscribe to (email) notifications
    """

    @classmethod
    def iarguments(cls):
        yield Source(["source"], default="", extra_doc="\n\nLeave empty for all source packages")
        yield Distribution(["distribution"], default="", choices=values.prepared_distributions, extra_doc="\n\nLeave empty for all distributions")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        def value_and_str(arg_name):
            # Keep the raw value, plus a never-None string form for the DB fields.
            raw = self.args[arg_name].value()
            return raw, ("" if raw is None else raw)

        self.source, self.sourcestr = value_and_str("source")
        self.distribution, self.distributionstr = value_and_str("distribution")

    def _run(self):
        # get_or_create: subscribing twice is a no-op (created == False).
        subscription, created = util.models().Subscription.objects.get_or_create(
            subscriber=self.request.user,
            package=self.sourcestr,
            distribution=self.distributionstr,
        )
        self.result = {"created": created, "source": subscription.package, "distribution": subscription.distribution}


class Unsubscribe(Subscribe):
    """
    Unsubscribe from (email) notifications
    """

    def _run(self):
        self.result = []
        # Filter in the database instead of fetching all of the user's
        # subscriptions and comparing in Python (same matches, less work).
        for s in util.models().Subscription.objects.filter(
                subscriber=self.request.user,
                package=self.sourcestr,
                distribution=self.distributionstr):
            s.delete()
            self.result.append({"source": s.package, "distribution": s.distribution})


class RemakeChroots(_Admin, _Confirm, _Maintenance, _CatAdministrator, Call):
    """
    Remake chroots

    Run actions 'remove', 'prepare', 'check' and 'activate'.
    """

    @classmethod
    def iarguments(cls):
        yield ListArgument(["--keys"], default=values.all_chroots, choices=values.all_chroots, doc="Chroot keys (<codename>:<arch>)")

    def _run(self):
        # Per-key result ("Success" or "Failed: <error>"); one chroot's failure
        # does not abort processing of the remaining keys.
        for key in self.args["keys"].value():
            try:
                codename, arch = key.split(":")
                chroot = util.models().Chroot.objects.get(source__codename=codename, architecture__name=arch)
                # Full remake cycle: remove, prepare, check, activate.
                chroot.Admin.mbd_remove(chroot)
                chroot.Admin.mbd_prepare(chroot)
                chroot.Admin.mbd_check(chroot)
                chroot.Admin.mbd_activate(chroot)
                self.result[key] = "Success"
            except BaseException as e:
                # NOTE(review): BaseException also swallows KeyboardInterrupt/SystemExit;
                # presumably intentional report-all semantics here -- confirm.
                self.result[key] = f"Failed: {util.e2http(e)}"


class Reindex(_Admin, _Confirm, _Maintenance, _CatAdministrator, Call):
    """
    Force repository reindex

    Usually, you reindex by doing an explicit ``check`` on a Repository
    instance -- however, that method would not reindex in case the
    repository config is unchanged. Use this call if you feel you need it
    anyway for whatever reason.

    If you just upgraded from 2.2.x, it's recommended to run it once
    (2.4.x reindex additionally runs ``reprepro flood``, which may make
    some arch=all packages available for (subsequently) added
    architectures).
    """

    @classmethod
    def iarguments(cls):
        yield Repositories(["--repositories", "-R"], default=values.all_repositories)

    def _run(self):
        # Unconditionally regenerate the reprepro config/index for each selected repository.
        for r in self.args["repositories"].objects():
            r.mbd_reprepro_update_config(force_reindex=True)


class Start(_Admin, _Confirm, _CatAdministrator, Call):
    """
    Start Daemon (accept incoming)

    Does nothing if already started; will fail if Daemon instance is not activated.
    """

    def _run(self):
        # Delegates entirely to the daemon module; no-op when already started (see class doc).
        daemon.start()


class Stop(_Admin, _Confirm, _CatAdministrator, Call):
    """
    Stop Daemon (stop accepting incoming)

    Does nothing if already stopped. Any ongoing events will be CANCELED.

    This state is *not persisted*. Please *deactivate* the Daemon instance via :mbdpage:`setup` to persist over *mini-buildd service* restarts.
    """

    def _run(self):
        # Delegates entirely to the daemon module; ongoing events are CANCELED (see class doc).
        daemon.stop()


class Wake(_Staff, _Confirm, _CatDeveloper, Call):
    """
    Wake a remote instance
    """

    @classmethod
    def iarguments(cls):
        yield StrArgument(["--remote", "-r"], choices=values.all_remotes)
        yield IntArgument(["--sleep", "-s"], default=5, doc="Sleep between wake attempts")
        yield IntArgument(["--attempts", "-a"], default=3, doc="Max number attempts")

    def _run(self):
        # Look the remote up by its HTTP endpoint, then poll its status with wake enabled.
        remote = util.models().Remote.objects.get(http=self.args["remote"].value())
        self.result = remote.mbd_get_status(
            wake=True,
            wake_sleep=self.args["sleep"].value(),
            wake_attempts=self.args["attempts"].value(),
        )


class Handshake(_CatAdministrator, Call):
    """
    Check if signed message matches a remote, reply our signed message on success

    This is for internal use only.
    """

    @classmethod
    def iarguments(cls):
        yield MultilineStrArgument(["--signed-message", "-S"])

    def _run(self):
        signed_message = self.args["signed_message"].value()
        # Try each configured remote until one verifies the signature; reply with
        # our own signed handshake message on the first success.
        for r in util.models().Remote.objects.all():
            try:
                r.mbd_verify(signed_message)
                self.result = daemon.get().handshake_message()
                LOG.debug("Remote handshake ok: '%s': %s: %s", r, r.key_long_id, r.key_name)
                return
            except Exception as e:
                # NOTE(review): production logs occasionally show "'tuple' object has
                # no attribute 'signatures'" here (see module head) -- presumably from
                # inside mbd_verify; not yet reproduced, verify upstream.
                util.log_exception(LOG, f"Remote handshake failed for '{r}'", e)
        raise util.HTTPBadRequest(f"GnuPG handshake failed: No remote for public key on {util.http_endpoint()}")


class Cronjob(_Admin, _Confirm, _CatAdministrator, Call):
    """
    Run a cron job now (out of schedule)
    """

    @classmethod
    def iarguments(cls):
        yield StrArgument(["id"], choices=values.all_cronjobs)

    def _run(self):
        # Run the selected job immediately; result is keyed by the job's own id.
        job_id = self.args["id"].value()
        job = daemon.get().crontab.get(job_id)
        self.result[job.id()] = job.run()


class Uploaders(_Admin, _Running, _CatAdministrator, Call):
    """
    Get upload permissions for repositories
    """

    @classmethod
    def iarguments(cls):
        yield Repositories(["--repositories", "-R"], default=values.all_repositories)

    def _run(self):
        for repo in self.args["repositories"].objects():
            info = {"allow_unauthenticated_uploads": repo.allow_unauthenticated_uploads}
            self.result[repo.identity] = info
            # closing(): make sure the temporary GnuPG keyring is cleaned up.
            with closing(daemon.UploadersKeyring(repo.identity)) as gpg:
                info["uploaders"] = gpg.get_pub_keys_infos()


class SnapshotLs(_Running, Call):
    """
    Get list of repository snapshots for a distribution
    """

    @classmethod
    def iarguments(cls):
        yield Distribution(["distribution"])

    def _run(self):
        # Resolve the repository from the distribution string, then list its snapshots.
        diststr = self.args["distribution"].value()
        repository = diststr2repository(diststr)
        self.result = repository.mbd_reprepro.get_snapshots(diststr)


class SnapshotCreate(_Admin, _Confirm, _CatAdministrator, SnapshotLs):
    """
    Create a repository snapshot
    """

    @classmethod
    def iarguments(cls):
        yield from SnapshotLs.iarguments()
        yield StrArgument(["name"], doc="Snapshot name")

    def _run(self):
        # Create a named snapshot for the distribution's repository.
        diststr = self.args["distribution"].value()
        snapshot_name = self.args["name"].value()
        diststr2repository(diststr).mbd_reprepro.gen_snapshot(diststr, snapshot_name)


class SnapshotDelete(SnapshotCreate):
    """
    Delete a repository snapshot
    """

    def _run(self):
        # Delete the named snapshot from the distribution's repository.
        diststr = self.args["distribution"].value()
        snapshot_name = self.args["name"].value()
        diststr2repository(diststr).mbd_reprepro.del_snapshot(diststr, snapshot_name)


class Debmirror(_Admin, _Confirm, _CatAdministrator, Call):
    """
    Make local partial repository mirror via :debpkg:`debmirror`

    This may be useful if you plan on publishing a stripped-down (f.e., only certain repos,
    only ``stable``, omit rollbacks) variant of your repo somewhere remote.
    """

    # External binary and the Debian package providing it (see util.check_program).
    PROGRAM = "/usr/bin/debmirror"
    DEB = "debmirror"

    @classmethod
    def iarguments(cls):
        yield ListArgument(["--suites", "-S"], default=values.all_suites, choices=values.all_suites, doc="Suite names (like 'stable', 'unstable')")
        yield Rollbacks(["--rollbacks", "-r"])
        yield Repositories(["--repositories", "-R"], default=values.all_repositories)
        yield Codenames(["--codenames", "-c"], default=values.all_codenames)
        yield StrArgument(
            ["--architectures", "-A"],
            default="amd64,i386,arm64,armhf,armel,s390x,ppc64el,mipsel,mips64el",
            doc=(
                "Architectures to mirror (``--arch`` in debmirror)\n\n"
                "Usually, you just want all, so the default already lists all currently supported Debian/Ubuntu architectures (but will only mirror the architectures actually found)"
            ))
        yield StrArgument(
            ["--components", "-C"],
            default="main,contrib,non-free,main/debian-installer,multiverse,restricted,universe",
            doc=(
                "Components to mirror (``--section`` in debmirror)\n\n"
                "Usually, you just want all, so the default already lists all known Debian/Ubuntu components (but will only mirror the components actually found)"
            ))
        yield StrArgument(
            ["--destination", "-D"],
            default=values.default_debmirror_destination,
            doc=(
                "Mirror destination dir (``mirrordir`` in debmirror)\n"
                "\n"
                "* ``{}`` will be replaced by repository identity\n"
                "* Directory must be accessible by user ``mini-buildd``\n"
                "\n"
                "*BE CAREFUL* with this value -- anything in this directory will be *HAPPILY REPLACED* by the mirror only\n"
            ))

    def _run(self):
        # Fail early with a helpful error if debmirror is not installed.
        util.check_program(self.PROGRAM, self.DEB)
        # One debmirror run per selected repository.
        for r in self.args["repositories"].objects():
            debmirror = [
                self.PROGRAM,
                "--verbose",
                "--ignore-release-gpg",  # NOTE(review): skips Release signature checks -- presumably because our per-instance key is not in debmirror's keyring; confirm
                "--diff", "none",
                "--getcontents",
                "--rsync-extra", "none",
                # Mirror from this very mini-buildd instance's HTTP endpoint.
                "--host", util.http_endpoint().hopo(),
                "--method", util.http_endpoint().scheme(),
                "--root", f"/repositories/{r.identity}",
                # Distributions restricted by rollbacks range, codenames and suites filters.
                "--dist", ",".join(r.mbd_get_diststrs(frollbacks=self.args["rollbacks"].range,
                                                      distributions_filter={"base_source__codename__in": self.args["codenames"].value()},
                                                      suiteoption_filter={"suite__name__in": self.args["suites"].value()})),
                "--arch", self.args["architectures"].value(),
                "--section", self.args["components"].value(),
                # '{}' in the destination is replaced by the repository identity.
                self.args["destination"].value().format(r.identity),
            ]
            LOG.debug(debmirror)
            call.Call(debmirror).check()


class KeyringPackages(_Admin, _Running, _Confirm, _CatAdministrator, Call):
    """
    Build keyring packages
    """

    @classmethod
    def iarguments(cls):
        yield Distributions(["--distributions", "-D"], default=values.active_keyring_distributions)
        yield BoolArgument(["--without-migration", "-M"], doc="Don't migrate packages")

    def _run(self):
        self.result = []
        # Attach to the daemon's event stream BEFORE uploading, so no result event can be missed.
        events_queue = events.Attach(daemon.get().events)

        package_version = package.DebianVersion.stamp()  # Use same value across distributions (else keyring package may not be dist-upgradeable)
        for diststr in self.args["distributions"].value():
            # Only 'suite' is used here -- just to validate the keyring flag.
            repository, distribution, suite = util.models().parse_diststr(diststr)
            if not suite.build_keyring_package:
                raise util.HTTPBadRequest(f"Keyring package to non-keyring suite requested (see 'build_keyring_package' flag): '{diststr}'")
            self.result.append(package.upload_template_package(package.KeyringPackage(package_version=package_version), diststr))

        def unfinished():
            # Uploads that neither errored on upload nor have received a result event yet.
            return [upload for upload in self.result if "error" not in upload and "event" not in upload]

        # Block on the event stream until every upload got its INSTALLED/FAILED/REJECTED event.
        while unfinished():
            event = events_queue.get()
            for upload in unfinished():
                if event.match(types=[events.Type.INSTALLED, events.Type.FAILED, events.Type.REJECTED], distribution=upload["distribution"], source=upload["source"], version=upload["version"]):
                    LOG.debug("Keyring package result: %s", event)
                    upload["event"] = event.type.name
                    # Unless disabled, migrate a successfully installed keyring package all the way up.
                    if (event.type == events.Type.INSTALLED) and not self.args["without_migration"].value():
                        repository, distribution, suite = util.models().parse_diststr(upload["distribution"])
                        repository.mbd_package_migrate(upload["source"], distribution, suite, full=True, version=upload["version"])


class TestPackages(_Admin, _Running, _Confirm, _CatAdministrator, Call):
    """
    Build test packages
    """

    # Available test package templates; 'mbd-test-ftbfs' is expected to fail to build.
    __TEMPLATES = ["mbd-test-archall", "mbd-test-cpp", "mbd-test-ftbfs"]

    @classmethod
    def iarguments(cls):
        yield ListArgument(["--sources", "-S"], default=cls.__TEMPLATES, choices=cls.__TEMPLATES, doc="Test source packages to use")
        yield Distributions(["--distributions", "-D"], default=values.active_experimental_distributions, choices=values.active_uploadable_distributions)
        yield Distributions(["--auto-ports", "-A"], default=[], choices=values.active_uploadable_distributions)
        yield BoolArgument(["--with-check", "-c"], doc="Check for correct packager results")

    def _run(self):
        self.result = []
        # Attach to the daemon's event stream BEFORE uploading, so no result event can be missed.
        events_queue = events.Attach(daemon.get().events)

        # Upload every selected test template to every selected distribution.
        for source in self.args["sources"].value():
            for diststr in self.args["distributions"].value():
                self.result.append(package.upload_template_package(package.TestPackage(source, auto_ports=self.args["auto_ports"].value()), diststr))

        if self.args["with_check"].value():
            def unfinished():
                # Uploads that neither errored on upload nor have received a result event yet.
                return [upload for upload in self.result if "error" not in upload and "event" not in upload]

            # Block on the event stream until every upload got its INSTALLED/FAILED/REJECTED event.
            while unfinished():
                event = events_queue.get()
                for upload in unfinished():
                    if event.match(types=[events.Type.INSTALLED, events.Type.FAILED, events.Type.REJECTED], distribution=upload["distribution"], source=upload["source"], version=upload["version"]):
                        upload["event"] = event.type.name
                        # Expectation: archall/cpp must install; ftbfs must fail -- anything else is an error.
                        if (event.type == events.Type.FAILED and upload["source"] in ["mbd-test-archall", "mbd-test-cpp"]) or \
                           (event.type == events.Type.INSTALLED and upload["source"] in ["mbd-test-ftbfs"]):
                            raise util.HTTPBadRequest(f"Test package failed: {event}")
                        LOG.debug("Test package result: %s", event)


class Setup(_Admin, _Confirm, _Maintenance, _CatAdministrator, Call):
    """
    Create, update or inspect your setup
    """

    @classmethod
    def iarguments(cls):
        """Declare all setup arguments, grouped by ``header`` into sections."""
        yield StrArgument(["--update"], default="", doc="Update existing instances; 'all' to update all, empty string to never update, <hash_id>,.. (see a previous run) to update selected instances", header="Run Options")
        yield StrArgument(["--pca"], default="", doc="Prepare, check and create instances; 'all' to pca all, empty string to never pca, <hash_id>,.. (see a previous run) to pca selected instances")

        yield StrArgument(["--identity"], default=values.default_identity, doc="Instance identity (for keyring package names, dput config, ...)", header="Daemon")
        yield StrArgument(["--ftp-endpoint"], default=values.default_ftp_endpoint, doc="FTP (incoming) network endpoint")

        yield ListArgument(
            ["--from-origins"],
            default=[],
            doc=(
                "Set list of origins (a.k.a. vendors) (as per ``distro-info``, ``di``): <origin>[:lts|all],...\n"
                "\n"
                "This will be used by ``--foo-from-origins`` options.\n"
                "\n"
                "Per-origin modifiers:\n"
                "\n"
                "n/a: No modifier (default): 'di-supported' codenames\n"
                "lts: 'di-supported' plus 'di-lts' codenames (Note: adds 'lts', 'elts' for Debian and 'esm' for Ubuntu)\n"
                "all: All codenames with a working setup from these origins (i.e., includes very old ones)"
            ),
            header="Generic")

        yield ListArgument(["--archives", "-A"], default=[], doc="Add arbitrary archives", header="Archives")
        yield BoolArgument(["--archives-from-origins"], doc="Add original (internet) archives from ``--from-origins``")
        yield BoolArgument(["--archives-from-apt-sources"], doc="Add archives guessed from your local sources.list")
        yield BoolArgument(["--archives-from-proxy"], doc="Add archives guessed from a locally running apt-cacher-ng")

        yield ListArgument(["--sources"], default=[], doc="Manually select codenames to setup sources for", header="Sources")
        yield BoolArgument(["--sources-from-origins"], doc="Add sources from ``--from-origins``")

        yield ListArgument(
            ["--distributions"],
            default=[],
            doc=(
                "Distributions to setup ``<codename>:<arch0>[+<arch1>..]``\n"
                "For architectures, you may use the special key word 'native' for this host's natively supported architectures.\n"
                "Please see tool ``arch-test`` for a decisive list of actually available architectures, including emulated ones.\n"
                "Example: sid:native+arm64,bullseye:native\n"
            ),
            header="Distributions")
        yield BoolArgument(["--distributions-from-sources"], doc="Auto-add distributions for all given ``sources`` codenames with native architectures")

        yield ListArgument(
            ["--repositories"],
            default=[],
            doc=(
                f"Repositories to setup ``<id>:<layout>:<codename>[+<codename>..]``\n"
                f"Setup layouts available: {'|'.join(dist.SETUP['layout'].keys())}\n"
                f"Example: test:Default:sid+bullseye,test2:Default:sid\n"
                # Fixed broken reST markup: "`debdev``" -> "``debdev``".
                "Note that repository IDs 'test' and 'debdev' are special names with hardcoded taintings:\n"
                "* ``test`` repo will be uploadable w/o authentication\n"
                "* ``debdev`` repo will be uploadable for Debian Developers (per installed ``debian-keyring``), and Layout 'Debian Developer' by default (``sid`` uploadable as ``unstable``)\n"
            ),
            header="Repositories")
        yield ListArgument(["--repositories-from-distributions"], default=[], doc="AutoSetup these repositories with all setup sources: '<id>[:<layout>]'")

        yield ListArgument(
            ["--chroots"],
            default=[],
            doc="Chroots to setup (uses same syntax as ``--distributions``).",
            header="Chroots")
        yield BoolArgument(["--chroots-from-sources"], doc="Auto-add chroots for all given ``sources`` codenames with native architectures")
        yield ChoiceArgument(["--chroots-backend", "-C"], default=values.default_chroot_backend, choices=["Dir", "File", "LVM", "LoopLVM", "BtrfsSnapshot"], doc="Chroot backend to use")

        yield ListArgument(["--remotes"], default=[], doc=f"Remotes to add. {inspect.getdoc(net.ClientEndpoint)}", header="Remotes")

    class Report(dict):
        """Inspection summary (mapping of issue name -> severity/quantity/search)."""

        #: Human-readable message per overall severity level.
        SEVERITY_MESSAGES = {
            0: "No issues found",
            1: "Notable issues found",
            2: "Warnings found",
            3: "Errors found",
        }

        def _add(self, key, severity, quantity, search=""):
            # A zero quantity never raises the severity for that entry.
            self[key] = {"severity": severity if quantity > 0 else 0, "quantity": quantity, "search": search}

        def __init__(self, result):
            instances = result["instances"]
            self._add("Total instances traversed", 0, len(instances))
            self._add("Differences from setup", 2, sum(1 for i in instances if i["diff"]["setup"]["diff"]), '"setup diff"')
            self._add("Differences from model defaults", 1, sum(1 for i in instances if i["diff"]["model"]["diff"]), '"model diff"')
            for status, severity in Setup.Instance.REPORT_STATUS.items():
                self._add(f"{status.name.capitalize()} instances", severity, sum(1 for i in instances if i["status"] == status.name), status.name)
            # Overall severity is the worst severity of all entries.
            self.severity = max(entry["severity"] for entry in self.values())
            self.timestamp = util.Datetime.now()

        def __str__(self):
            return f"Inspection: {self.SEVERITY_MESSAGES[self.severity]} (on {self.timestamp})"

    @classmethod
    def report_path(cls):
        # Location of the persisted inspection report (JSON) under the "log" route.
        return config.ROUTES["log"].path.join("inspect.json")

    @classmethod
    def report_load(cls):
        """Load the persisted inspection report, or None if none has been saved yet.

        The loaded report's timestamp is replaced by the report file's own
        timestamp, i.e. the time the report was generated (not loaded).
        """
        # Compute the path once (the original recomputed it for the timestamp).
        path = cls.report_path()
        if not os.path.exists(path):
            return None
        with util.fopen(path) as r:
            report = cls.Report(json.load(r))
        report.timestamp = util.Datetime.from_path(path)
        return report

    def report_save(self):
        """Persist ``self.result`` as JSON to :meth:`report_path`."""
        path = self.report_path()
        with util.fopen(path, "w") as r:
            json.dump(self.result, r)

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.report = None  # No report yet; presumably filled after an inspection run -- confirm against callers

    class Instance:
        class Status(enum.Enum):
            MISSING = enum.auto()      # Expected instance not there
            EXISTS = enum.auto()       # Present; used for non-status models (no further state)
            ACTIVE = enum.auto()       # Status model instance is active
            CHANGED = enum.auto()      # Changed inactive status instance
            FAILED = enum.auto()       # Failed (check failed) inactive status instance
            DEACTIVATED = enum.auto()  # Deactivated (presumably on purpose by user) status instance

        #: Report instances for these statuses with given severity
        #: (severity scale as in Report.SEVERITY_MESSAGES: 1=notable, 2=warning, 3=error;
        #: statuses not listed here report with severity 0).
        REPORT_STATUS = {
            Status.MISSING: 2,
            Status.CHANGED: 2,
            Status.FAILED: 3,
            Status.DEACTIVATED: 1,
        }

        def diff(self):
            """Overload/expand this for additional custom diffs

            Compares the existing object (``self.obj``) against a freshly
            constructed target model instance, split into fields given by the
            setup vs. fields left at model defaults. Returns the diff structure
            below (empty diffs when the object is missing).
            """
            diff = {
                "setup": {
                    "fields": [],
                    "diff": {},
                },
                "model": {
                    "fields": [],
                    "diff": {},
                },
            }
            if self.obj is not None:
                setup = {**self.identifiers, **self.options}
                # Unsaved model instance representing what the setup would produce.
                target = self.model(**setup)
                for field in (f for f in self.model.mbd_get_fields(exclude=["id", "status", "last_checked", "ping"]) if not f.is_relation):
                    # Fields set by the setup go to "setup", the rest to "model".
                    data = diff["setup"] if field.name in setup else diff["model"]

                    data["fields"].append(field.name)
                    current_value = getattr(self.obj, field.name, None)
                    target_value = getattr(target, field.name, None)
                    # Don't consider fields where the target value is empty -- most likely valid manual changes
                    # Exception: "sbuildrc_snippet", "chroot_setup_script": Should be empty, most likely obsoleted by sbuild blocks
                    if (field.name in ["sbuildrc_snippet", "chroot_setup_script"] and target_value != current_value) or target_value not in ["", current_value]:
                        data["diff"][field.name] = {
                            "current": str(current_value),
                            "target": str(target_value),
                        }
            return diff

        @classmethod
        def _add_m2m_diff(cls, diff, field, expected, current):
            diff["setup"]["fields"].append(field)
            _expected = set(expected)
            _current = set(current)
            if _expected - _current:
                diff["setup"]["diff"].setdefault(field, {})["missing"] = list(_expected - _current)
            if _current - _expected:
                diff["setup"]["diff"].setdefault(field, {})["unknown"] = list(_current - _expected)

        def _get_obj(self):
            """Override this for custom getter

            Default: look the model instance up by its identifiers (None if absent).
            """
            return self.model.mbd_get_or_none(**self.identifiers)

        def __init__(self, _call, model, options, update_args, **identifiers):
            """
            Ensure/inspect one model instance as part of a running ``setup`` call.

            :param _call: the running ``Setup`` call; its args are read, and this
                instance's info dict is appended to ``_call.result["instances"]``.
            :param model: the model class to handle.
            :param options: extra (non-identifying) field values set on create/update.
            :param update_args: arbitrary data consumed by ``update()`` overrides in subclasses.
            :param identifiers: field values identifying the instance (lookup keys).
            """
            self.call, self.model, self.is_status_model, self.options, self.update_args, self.identifiers = _call, model, issubclass(model, util.models().StatusModel), options, update_args, identifiers
            LOG.debug("Instance: %s(%s)", self.model, self.identifiers)

            # Stable identity (class repr + identifier reprs) and its sha256 hash:
            # the hash is what the "update"/"pca" args may select individual instances by.
            self.identity = {
                "class": repr(self.model),
                "identifiers": {k: repr(v) for k, v in self.identifiers.items()},
            }
            self.identity_hash = hashlib.sha256(json.dumps(self.identity, sort_keys=True).encode(config.CHAR_ENCODING)).hexdigest()

            self.obj = self._get_obj()
            self.info = {
                "identity": self.identity,
                "identity_hash": self.identity_hash,
                "identity_str": f"{self.model.__name__}({self.identifiers})",
                "diff": None,
                "status": None,
                "actions": [],
            }

            # Create or update, if selected via the "update" arg ("all" or this instance's hash).
            # NOTE(review): a sha256 hexdigest never contains ",", so split(",") just wraps the hash in a
            # one-element list -- presumably the arg holds a single hash or "all"; confirm whether
            # comma-separated hash lists were intended here.
            if self.call.args["update"].value() in ["all"] + self.identity_hash.split(","):
                if self.obj is None:
                    self.obj = self.model(**self.identifiers, **self.options)
                    self.obj.save()
                    self.info["actions"].append("created")
                else:
                    # Re-apply identifiers and options onto the existing instance.
                    for k, v in {**self.identifiers, **self.options}.items():
                        setattr(self.obj, k, v)
                    self.info["actions"].append("updated")

                if self.obj is not None:
                    self.update()  # subclass hook (e.g., m2m relations), then persist
                    self.obj.save()

            # Prepare/check/activate (PCA), if selected -- StatusModel subclasses only.
            if self.call.args["pca"].value() in ["all"] + self.identity_hash.split(","):
                if self.obj is not None and issubclass(self.model, util.models().StatusModel):
                    try:
                        self.model.Admin.mbd_pca(self.obj, force=True)
                        self.info["actions"].append("pca")
                    except BaseException as e:
                        # Best effort: record the failure, but keep processing further instances.
                        self.info["actions"].append("pca(failed)")
                        util.log_exception(LOG, f"PCA failed on {self.obj}", e)

            self.info["diff"] = self.diff()

            # Derive the report status (see Status/REPORT_STATUS on this class).
            if self.obj is None:
                status = self.Status.MISSING
            elif not self.is_status_model:
                status = self.Status.EXISTS
            elif self.obj.mbd_is_active():
                status = self.Status.ACTIVE
            elif self.obj.mbd_is_changed():
                status = self.Status.CHANGED
            elif self.obj.mbd_is_reactivate():
                status = self.Status.FAILED
            else:
                status = self.Status.DEACTIVATED
            self.info["status"] = status.name
            self.info["status_severity"] = self.REPORT_STATUS.get(status, 0)

            if self.obj is not None and not self.model.__name__.endswith("Option"):
                # Help for HTML template only
                self.info["change_instance_url"] = f"admin:mini_buildd_{self.model.__name__.lower()}_change"
                self.info["change_instance_pk"] = str(self.obj.pk)
            self.call.result["instances"].append(self.info)

        def update(self):
            """Hook for subclasses: called after create/update and before the final save (no-op by default)."""
            pass

    @classmethod
    def ilocal_archive_urls(cls):
        """Yield archive URLs found in the local APT sources lists, each normalized to exactly one trailing slash."""
        try:
            import aptsources.sourceslist  # pylint: disable=import-outside-toplevel

            for entry in aptsources.sourceslist.SourcesList():
                if entry.invalid or entry.disabled:
                    continue
                # These URLs come from the user: normalize the uri to exactly one trailing slash.
                yield entry.uri.rstrip("/") + "/"
        except BaseException as e:
            util.log_exception(LOG, "Failed to scan local sources.lists for default mirrors ('python-apt' not installed?)", e)

    @classmethod
    def iapt_cacher_archive_urls(cls):
        """Yield proxy archive URLs for all known origins, if a local apt-cacher-ng instance can be detected."""
        proxy = net.detect_apt_cacher_ng(url=f"http://{config.HOSTNAME_FQDN}:3142")
        if not proxy:
            return
        LOG.debug("Local apt-cacher-ng detected: %s", proxy)
        for origin_setup in dist.SETUP["origin"].values():
            for archive_path in origin_setup.get("archive_paths", []):
                LOG.debug("Local proxy archive: '%s/%s/'", proxy, archive_path)
                yield f"{proxy}/{archive_path}/"

    class Dists(dict):
        """Mapping of codename to a unique, order-preserving list of architectures."""

        @classmethod
        def iexpand_arch(cls, arch):
            """Expand the pseudo-arch ``native`` to the native architectures; yield anything else verbatim."""
            if arch != "native":
                yield arch
            else:
                yield from dist.Archs.native()

        @classmethod
        def iexpand_archs(cls, archs):
            """Expand ``native`` within an iterable of architectures."""
            for candidate in archs:
                yield from cls.iexpand_arch(candidate)

        def set(self, codename, archs):
            """Add architectures for ``codename``, keeping first-seen order and dropping duplicates."""
            known = self.setdefault(codename, [])
            for candidate in self.iexpand_archs(archs):
                if candidate not in known:
                    known.append(candidate)

        def __init__(self, items=None):
            """
            ``<codename>:<arch0>+<arch1>...<sep>...`` to ``{codename: uniq_archlist}``
            """
            for item in items or []:
                codename, _sep, raw_archs = item.partition(":")
                self.set(codename, util.esplit(raw_archs, "+"))

        def as_argument_value(self):
            """Serialize back into the ``<codename>:<arch0>+<arch1>...`` argument form."""
            return [f"{codename}:{'+'.join(archs)}" for codename, archs in self.items()]

        def merge(self, dists):
            """Merge another codename->archs mapping into this one."""
            for codename, archs in dists.items():
                self.set(codename, archs)

    def __setup(self):
        """
        Compute the target setup (plain dict) from the call arguments.

        Explicit argument values are combined with values derived from the
        ``*_from_*`` convenience flags (origins, apt sources, proxy, ...).

        :returns: dict with keys ``identity``, ``ftp_endpoint``, ``archives``,
            ``sources``, ``distributions``, ``repositories``, ``chroots``,
            ``chroots_backend`` and ``remotes``.
        """
        def default_layout(repo, layout):
            """Layout fallback (de-duplicated; was repeated inline): explicit value wins; repo "debdev" defaults to "Debian Developer"."""
            return layout if layout else "Debian Developer" if repo == "debdev" else "Default"

        setup = {}

        # Daemon
        setup["identity"] = self.args["identity"].value()
        setup["ftp_endpoint"] = self.args["ftp_endpoint"].value()

        # Generic: "<origin>:<m>" pairs; "m" is later fed to dist.setup_codenames_from_origin()
        from_origins = {}
        for origin_info in self.args["from_origins"].value():
            o, dummy, m = origin_info.partition(":")
            from_origins[o] = m

        # Archives: explicit, plus optionally from local apt sources, local proxy, and known origins
        archives = self.args["archives"].value()
        if self.args["archives_from_apt_sources"].value():
            archives.extend(self.ilocal_archive_urls())
        if self.args["archives_from_proxy"].value():
            archives.extend(self.iapt_cacher_archive_urls())
        if self.args["archives_from_origins"].value():
            for o in from_origins:
                archives.extend(dist.SETUP["origin"][o]["archive"])
        setup["archives"] = util.uniq(archives)

        # Sources: explicit, plus optionally all codenames from the given origins
        sources = self.args["sources"].value()
        if self.args["sources_from_origins"].value():
            for o, m in from_origins.items():
                sources.extend(dist.setup_codenames_from_origin(o, m))
        setup["sources"] = util.uniq(sources)

        # Distributions
        setup["distributions"] = Setup.Dists(self.args["distributions"].value())
        if self.args["distributions_from_sources"].value():
            setup["distributions"].merge({c: dist.Archs.native() for c in setup["sources"]})

        # Repositories: "<repo>[:<layout>[:<dist0>+<dist1>...]]"
        setup["repositories"] = {}
        for repo_preset in self.args["repositories"].value():
            repo, dummy, rest = repo_preset.partition(":")
            layout, dummy, _dists = rest.partition(":")
            setup["repositories"].setdefault(repo, {})
            setup["repositories"][repo]["dists"] = util.esplit(_dists, "+")
            setup["repositories"][repo]["layout"] = default_layout(repo, layout)

        # Repositories from distributions: "<repo>[:<layout>]", dists taken from setup["distributions"]
        for repo_preset in self.args["repositories_from_distributions"].value():
            repo, dummy, layout = repo_preset.partition(":")
            setup["repositories"].setdefault(repo, {})
            setup["repositories"][repo]["dists"] = list(setup["distributions"].keys())
            setup["repositories"][repo]["layout"] = default_layout(repo, layout)

        # Chroots
        setup["chroots"] = Setup.Dists(self.args["chroots"].value())
        if self.args["chroots_from_sources"].value():
            setup["chroots"].merge({c: dist.Archs.native() for c in setup["sources"]})
        setup["chroots_backend"] = self.args["chroots_backend"].value()

        # Remotes
        setup["remotes"] = self.args["remotes"].value()

        return setup

    def __check(self):
        """
        Preliminary sanity checks: raise ``HTTPBadRequest`` on configurations ``setup`` cannot handle.

        Currently checks for distributions sharing the same base source codename.

        Improvements over the previous implementation: one single DB pass instead
        of one extra ``filter()`` query per distribution, and each duplicate
        codename is reported once (not once per distribution sharing it).
        """
        codename_counts = collections.Counter(d.base_source.codename for d in util.models().Distribution.objects.all())
        # Counter preserves first-seen order (dict semantics), so the report order is stable.
        duplicate_distributions = [codename for codename, count in codename_counts.items() if count > 1]
        if duplicate_distributions:
            raise util.HTTPBadRequest(f"Duplicate distributions found: {','.join(duplicate_distributions)}: This is unfortunately possible, but 'unintended use'. Please check if you really need them (``setup`` can't be used if you stick with it)")

    def __run(self):
        """
        Compute the target setup from the call args, then ensure/inspect every model instance.

        Order matters: Daemon, Archives, Sources (+ APT keys), Distributions,
        Repositories (+ Layouts), Chroots, Remotes -- later instances look up
        objects handled by earlier ones.
        """
        # Preliminary checks
        self.__check()

        setup = self.__setup()
        LOG.debug("Setup from args: %s", setup)

        self.result = {
            "setup": setup,   # Setup computed from args (info, debugging)
            "instances": [],  # Diffs in individual instances
            "report": {},     # Summary && unreported in instances
        }

        # Daemon
        Setup.Instance(self,
                       util.models().Daemon,
                       {
                           "identity": setup["identity"],
                           "ftpd_bind": setup["ftp_endpoint"],
                       },
                       {})

        # Archives
        for url in setup["archives"]:
            Setup.Instance(self, util.models().Archive, {}, {}, url=url)

        # Sources
        class AptKeyInstance(Setup.Instance):
            """AptKey instance that also recognizes legacy short key ids."""

            def _get_obj(self):
                """Also lookup traditional/deprecated (short) key (last 8 chars) -- there still might be objects having this"""
                obj = self.model.mbd_get_or_none(**self.identifiers)
                if obj is None:
                    obj = self.model.mbd_get_or_none(key_id=self.identifiers["key_id"][-8:])  # short key
                return obj

            def update(self):
                """Implicitly update key_id with long key_id from setup"""
                self.obj.key_id = self.identifiers["key_id"]

        class SourceInstance(Setup.Instance):
            def update(self):
                """Reset the apt_keys m2m relation to exactly the keys ensured for this source."""
                self.obj.apt_keys.clear()
                for apt_key in self.update_args["apt_key_instances"]:
                    if apt_key.obj is not None:
                        self.obj.apt_keys.add(apt_key.obj)

            def diff(self):
                """Extend the base diff with the apt_keys m2m state and (for base sources) a codeversion check."""
                diff = super().diff()
                if self.obj is not None:
                    self._add_m2m_diff(diff, "apt_keys", self.update_args["apt_keys"], [a.mbd_key_id() for a in self.obj.apt_keys.all()])

                    if dist.is_base_source(self.obj.origin, self.obj.codename) and self.obj.codeversion != self.obj.codeversion_guessed:
                        diff["model"]["diff"]["codeversion"] = {
                            "current": self.obj.codeversion,
                            "target": self.obj.codeversion_guessed,
                        }
                return diff

        # Each codename brings the source itself plus its setup-defined extra sources.
        for codename in (dist.Codename(c) for c in setup["sources"]):
            for source in [{"codename": codename.codename}] + codename.setup_extra_sources():
                apt_keys = source.get("apt_keys", codename.setup.get("apt_keys", []))
                SourceInstance(self,
                               util.models().Source,
                               source.get("options", {}),
                               {
                                   "apt_keys": apt_keys,
                                   "apt_key_instances": [AptKeyInstance(self, util.models().AptKey, {}, {}, key_id=long_key_id) for long_key_id in apt_keys],
                               },
                               origin=source.get("origin", codename.origin),
                               codename=source["codename"])

        # Distributions
        class DistributionInstance(Setup.Instance):
            def update(self):
                """Attach extra sources, components and architecture options; then fix the arch-"all" builder."""
                codename = self.update_args["codename"]

                # Extra Sources
                for ps in self.update_args["priority_source_instances"]:
                    if ps.obj is not None:
                        self.obj.extra_sources.add(ps.obj)

                # Components
                for component in self.update_args["component_instances"]:
                    if component.obj is not None:
                        self.obj.components.add(component.obj)

                # Architectures
                for arch in self.update_args["archs"]:
                    Setup.Instance(
                        self.call,
                        util.models().ArchitectureOption,
                        {
                            "optional": arch in codename.setup_arch_optional(),
                        },
                        {},
                        architecture=Setup.Instance(self.call, util.models().Architecture, {}, {}, name=arch).obj,
                        distribution=self.obj)

                # Exactly one architecture option should build arch "all"; repair if that invariant is broken.
                archall = self.obj.architectureoption_set.filter(build_architecture_all=True)
                if len(archall) != 1:
                    LOG.debug("Fixing archall (now: %s)", archall)
                    archall_set = False  # Use first non-optional arch to build arch "all"
                    for ao in self.obj.architectureoption_set.all():
                        ao.build_architecture_all = not archall_set and not ao.optional
                        ao.save()
                        if ao.build_architecture_all:
                            archall_set = True

            def diff(self):
                """Extend the base diff with the components and extra_sources m2m state."""
                diff = super().diff()
                if self.obj is not None:
                    self._add_m2m_diff(diff, "components",
                                       self.update_args["codename"].setup_components(),
                                       [c.name for c in self.obj.components.all()])
                    self._add_m2m_diff(diff, "extra_sources",
                                       [s.source.codename for s in util.models().PrioritySource.objects.filter(source__codename__regex=fr"^{self.obj.base_source.codename}[-/]")],
                                       [s.source.codename for s in self.obj.extra_sources.all()])
                return diff

        for codename, archs in ((dist.Codename(c), a) for c, a in setup["distributions"].items()):
            DistributionInstance(self,
                                 util.models().Distribution,
                                 codename.setup_distribution_options(),
                                 {
                                     "codename": codename,
                                     "priority_source_instances": [Setup.Instance(self,
                                                                                  util.models().PrioritySource,
                                                                                  {},
                                                                                  {},
                                                                                  source=util.models().Source.mbd_get_or_none(codename=extra_source["codename"], origin=extra_source.get("origin", codename.origin)),
                                                                                  priority=extra_source.get("priority", 1))
                                                                   for extra_source in codename.setup_extra_sources()],
                                     "component_instances": [Setup.Instance(self, util.models().Component, {}, {}, name=comp) for comp in codename.setup_components()],
                                     "archs": archs,
                                 },
                                 base_source=util.models().Source.mbd_get_or_none(codename=codename.codename, origin=codename.origin))

        # Repositories
        class LayoutInstance(Setup.Instance):
            def update(self):
                """Create/update the layout's suites and their per-layout suite options."""
                setup = self.update_args["setup"]
                for suite_name, suite_setup in setup["suites"].items():
                    suite_instance = Setup.Instance(self.call, util.models().Suite, {}, {}, name=suite_name)
                    Setup.Instance(self.call,
                                   util.models().SuiteOption,
                                   {
                                       **suite_setup.get("options", {}),
                                       "rollback": setup["rollback"].get(suite_name, 0),
                                       "migrates_to": util.models().SuiteOption.mbd_get_or_none(layout=self.obj, suite=util.models().Suite.mbd_get_or_none(name=suite_setup.get("migrates_to"))),
                                   },
                                   {},
                                   layout=self.obj,
                                   suite=suite_instance.obj)

        class RepositoryInstance(Setup.Instance):
            def update(self):
                """Attach the configured distributions (looked up via base source codename)."""
                for codename in self.update_args["dists"]:
                    distribution = util.models().Distribution.mbd_get_or_none(base_source__codename__exact=codename)
                    self.obj.distributions.add(distribution)

            def diff(self):
                """Extend the base diff with the distributions m2m state."""
                diff = super().diff()
                if self.obj is not None:
                    self._add_m2m_diff(diff, "distributions",
                                       self.update_args["dists"],
                                       [d.base_source.codename for d in self.obj.distributions.all()])
                return diff

        for repo, _values in setup["repositories"].items():
            # Layouts
            options = {}
            layout_setup = dist.SETUP["layout"].get(_values["layout"], {})
            layout_instance = LayoutInstance(self, util.models().Layout, layout_setup.get("options", {}), {"setup": layout_setup}, name=_values["layout"])

            # Special-cased repository identities: "test" and "debdev".
            if repo == "test":
                options["allow_unauthenticated_uploads"] = True
            elif repo == "debdev":
                options["extra_uploader_keyrings"] = (
                    "# Allow Debian maintainers (must install the 'debian-keyring' package)\n"
                    "/usr/share/keyrings/debian-keyring.gpg"
                )

            options["layout"] = layout_instance.obj
            RepositoryInstance(self, util.models().Repository, options, {"dists": _values["dists"]}, identity=repo)

        # Chroots (model class is picked by backend name, e.g. "<Backend>Chroot")
        for codename, archs in ((dist.Codename(c), a) for c, a in setup["chroots"].items()):
            for arch in archs:
                Setup.Instance(self,
                               getattr(util.models(), f"{setup['chroots_backend']}Chroot"),
                               codename.setup_chroot_options(),
                               {},
                               source=util.models().Source.mbd_get_or_none(codename=codename.codename, origin=codename.origin),
                               architecture=Setup.Instance(self, util.models().Architecture, {}, {}, name=arch).obj)

        # Remotes
        for ep in setup["remotes"]:
            Setup.Instance(self, util.models().Remote, {}, {}, http=ep)

        # Report
        self.report = self.Report(self.result)

    def _run(self):
        """Call entry point: delegate to the (name-mangled) implementation."""
        self.__run()

    @classmethod
    def setup_inspect(cls):
        """Produce a ``Setup`` call whose arguments mirror the currently configured model instances."""
        chroot_dists = Setup.Dists()
        for chroot in util.models().Chroot.objects.all():
            chroot_dists.merge(Setup.Dists([chroot.mbd_key()]))

        return Setup(
            identity=daemon.get_model().identity,
            ftp_endpoint=daemon.get_model().ftpd_bind,
            archives=[archive.url for archive in util.models().Archive.objects.all()],
            sources=[distribution.base_source.codename for distribution in util.models().Distribution.objects.all()],
            distributions=[distribution.mbd_setup() for distribution in util.models().Distribution.objects.all()],
            repositories=[repository.mbd_setup() for repository in util.models().Repository.objects.all()],
            chroots=chroot_dists.as_argument_value(),
            remotes=[remote.http for remote in util.models().Remote.objects.all()],
        )

    @classmethod
    def setup_extend(cls):
        """Produce a ``Setup`` call that extends the current configuration from known origins/sources."""
        arguments = {
            "identity": daemon.get_model().identity,
            "ftp_endpoint": daemon.get_model().ftpd_bind,
            "archives_from_origins": True,
            "sources_from_origins": True,
            "distributions_from_sources": True,
            "repositories_from_distributions": [repository.mbd_setup(with_dists=False) for repository in util.models().Repository.objects.all()],
            "chroots_from_sources": True,
        }
        return Setup(**arguments)


# Canned ``setup`` calls to bootstrap a pristine instance, per supported vendor origin.
SETUP_BOOTSTRAP = {
    origin: Setup(
        update="all",
        pca="all",
        from_origins=origin,
        archives_from_origins=True,
        sources_from_origins=True,
        distributions_from_sources=True,
        repositories_from_distributions="test",
        chroots_from_sources=True,
    )
    for origin in ("Debian", "Ubuntu")
}


class Calls(collections.OrderedDict):
    """Registry of every ``Call`` subclass defined in this module, keyed by call name."""

    def __init__(self):
        module_members = sys.modules[__name__].__dict__.values()
        call_classes = (member for member in module_members if inspect.isclass(member) and issubclass(member, Call) and member != Call)
        super().__init__({call_class.name(): call_class for call_class in call_classes})


# Module-level singleton: name -> Call class registry, built at import time.
CALLS = Calls()
