import ftplib
import glob
import logging
import os
import re
import shutil

import debian.deb822

from mini_buildd import config, daemon, dist, events, files, net, sbuild, util

LOG = logging.getLogger(__name__)


class Base(debian.deb822.Changes):
    """
    Wrapper around a Debian changes-like (deb822) file.

    May be constructed from a file path (``str``) -- parsed from disk, with
    ``file_path`` set -- or from any other value accepted by
    ``debian.deb822.Changes`` (file object, dict, ...), with ``file_path`` left None.
    """

    def __init__(self, initial_value):
        self.file_path = None  # Backing file path; stays None when built from raw data
        if isinstance(initial_value, str):
            self.file_path = initial_value
            with util.fopen(initial_value) as cf:
                super().__init__(cf)
        else:
            super().__init__(initial_value)

        self.dfn = files.DebianName(self["Source"], self["Version"])  # Debian file-naming helper for this source/version

    def __str__(self):
        """Superclass would show all values multiline, so we need to replace with s.th. less invasive"""
        return self.dfn.changes(self.get("Architecture")) if self.file_path is None else self.file_name()

    def file_name(self):
        """Base name of the backing file, or None if not file-backed."""
        return None if self.file_path is None else os.path.basename(self.file_path)

    def dir_path(self):
        """Directory of the backing file, or None if not file-backed."""
        return None if self.file_path is None else os.path.dirname(self.file_path)

    def dsc_file_path(self):
        """Path of the accompanying DSC file (needs ``file_path`` to be non-None)."""
        return os.path.join(os.path.dirname(self.file_path), self.dfn.dsc())

    def get_files(self, key=None):
        """List the 'Files' entries; with ``key`` given, list only that item of each entry."""
        return [f[key] if key else f for f in self.get("Files", [])]

    def add_file(self, file_name):
        """Append ``file_name`` to the 'Files' list, computing md5sum and size from disk."""
        self.setdefault("Files", [])
        self["Files"].append({
            "md5sum": util.Hash(file_name).md5(),
            "size": os.path.getsize(file_name),  # NOTE(review): int, while deb822 values are usually str -- confirm dump() accepts this
            "section": "mini-buildd",
            "priority": "extra",
            "name": os.path.basename(file_name),
        })

    def upload_file_path(self):
        """Path of the '.upload' stamp file accompanying this changes file."""
        return os.path.splitext(self.file_path)[0] + ".upload"

    def uploaded(self):
        """Whether an '.upload' stamp file exists (i.e., this was uploaded before)."""
        return os.path.exists(self.upload_file_path())

    def _upload(self, ftp_cls, endpoint, force=False):
        """
        Upload this changes file plus all accompanying files via FTP(S).

        An '.upload' stamp file records the target URL; unless ``force`` is
        given, a present stamp skips the upload. Returns the timestamp taken
        before the upload started (for events ``--after=``).
        """
        pre_upload_timestamp = util.Datetime.now()

        upload = self.upload_file_path()
        if not force and os.path.exists(upload):
            with util.fopen(upload) as uf:
                LOG.warning("FTP: '%s' exists (already uploaded to '%s')", upload, uf.read())
        else:
            with ftp_cls() as ftp:
                ftp.connect(endpoint.options.get("host"), int(endpoint.options.get("port")))
                if isinstance(ftp, ftplib.FTP_TLS):
                    ftp.auth()
                    ftp.prot_p()  # Also secure the data connection
                ftp.login()
                ftp.cwd("/incoming")
                # Referenced files first, the changes file itself last
                for fd in self.get_files() + [{"name": self.file_name()}]:
                    f = fd["name"]
                    LOG.debug("FTP: Uploading file: '%s'", f)
                    with open(os.path.join(os.path.dirname(self.file_path), f), "rb") as fi:
                        ftp.storbinary(f"STOR {f}", fi)
                with util.fopen(upload, "w") as fi:
                    fi.write(endpoint.geturl())
                LOG.info("FTP: '%s' uploaded to '%s'.", self.file_name(), endpoint.geturl())

        return pre_upload_timestamp  # for events --after=

    def upload(self, endpoint, force=False):
        """Upload via FTPS when the endpoint is SSL, plain FTP otherwise."""
        return self._upload(ftplib.FTP_TLS if endpoint.is_ssl() else ftplib.FTP, endpoint, force=force)

    def tar(self):
        """Return a Tar object holding all referenced files plus the changes file itself."""
        _files = [os.path.join(os.path.dirname(self.file_path), f) for f in self.get_files("name")] + [self.file_path]
        return files.Tar().add(_files)

    def untar(self, dir_path):
        """Extract the companion '<changes>.tar' archive into ``dir_path``."""
        files.Tar.extract(self.file_path + ".tar", dir_path)

    def save_as(self, file_path, gnupg=None):
        """Write to file (optionally signed)"""
        LOG.debug("Saving changes: %s", file_path)
        os.makedirs(os.path.dirname(file_path), exist_ok=True)  # Be sure base dir is always available
        with open(file_path, "w+", encoding=config.CHAR_ENCODING) as f:
            f.write(self.dump())
        if gnupg is not None:
            LOG.debug("Signing changes: %s", file_path)
            gnupg.sign(file_path)
        self.file_path = file_path

    def _move_to(self, dir_path, mv):
        """Move (or copy) changes file along with all accompanying files (needs ``file_path`` to be non-None)"""
        LOG.debug("%s changes: '%s'->'%s'", mv.__name__, self.file_path, dir_path)
        os.makedirs(dir_path, exist_ok=True)
        for fd in [{"name": self.file_name()}] + self.get_files():
            f = fd["name"]
            f_abs = os.path.join(self.dir_path(), f)
            # Fix: original mixed an f-string placeholder ('{f}') into a %-style format,
            # so the file name was never interpolated into the log message.
            LOG.debug("%s '%s' to '%s'", mv.__name__, f, dir_path)
            mv(f_abs, os.path.join(dir_path, f))
        self.file_path = os.path.join(dir_path, self.file_name())

    def move_to(self, path):
        """Move this changes file and its accompanying files to ``path``."""
        return self._move_to(path, mv=os.rename)

    def copy_to(self, path):
        """Copy this changes file and its accompanying files to ``path``."""
        return self._move_to(path, mv=shutil.copy)


class Changes(Base):
    """File-backed changes with mandatory-key validation and package/build keys."""

    FILE_APX = ""
    #: <key>: <public_hint>
    MANDATORY = {"Distribution": "", "Source": "", "Version": "", "Architecture": ""}

    @classmethod
    def match(cls, file_path):
        """Match file paths that look like a changes file of this class (via FILE_APX)."""
        # Fix: escape the literal dot before 'changes' (was '.', matching any character)
        return re.compile(rf"^.+{files.DebianName.uext(cls.FILE_APX)}_[^_]+\.changes$").match(file_path)

    def __init__(self, initial_value):
        super().__init__(initial_value)

        # Reject changes that lack any mandatory key (hint is user-facing help text)
        for key, hint in self.MANDATORY.items():
            if key not in self:
                hint_apx = f" ({hint})" if hint else ""
                raise util.HTTPBadRequest(f"Changes missing mandatory key: {key}{hint_apx}")

        self._timecode = util.Datetime.timecode()  # Fallback when no 'Timecode' cfield present

        #: Key to identify if a 'Package' (Upload, Buildrequest, Buildresult).
        self.key = os.path.join(self["Source"], self["Version"], self.timecode)
        #: Key to identify a 'Build' (Buildrequest, Buildresult).
        self.bkey = os.path.join(self.key, self["Architecture"])

    def get_events_path(self, *args):
        """Path below the events route for this build key (optionally joined with ``args``)."""
        return os.path.join(config.ROUTES["events"].path.full, self.bkey, *args)

    @property
    def timecode(self):
        """Timecode from the changes' 'Timecode' cfield, falling back to our creation timecode."""
        return self.cget("Timecode", self._timecode)

    def cget(self, field, default=None):
        """Get a mini-buildd custom field (see ``util.CField``)."""
        return self.get(util.CField(field).fullname, default=default)

    def cset(self, field, value):
        """Set a mini-buildd custom field (see ``util.CField``)."""
        self[util.CField(field).fullname] = value

    def fields(self):
        """All items keyed by ``util.Field`` objects."""
        return {util.Field(field): value for field, value in self.items()}

    def to_event_json(self):
        """JSON-serializable dict for event notifications (keys, timecode, selected fields)."""
        return {
            **{
                "timecode": self.timecode,
                "key": self.key,
                "bkey": self.bkey,
                "changes": os.path.join(self.bkey, self.file_name()) if self.file_name() else None,
            },
            **{field.snake_name: value for field, value in self.fields().items() if field.is_cfield or field.snake_name in ["architecture", "maintainer", "changed_by"]},
        }

    def save_to(self, dir_path, tar=None):
        """Save (signed) into ``dir_path``, optionally saving and referencing a companion tar."""
        os.makedirs(dir_path, exist_ok=True)
        file_path = os.path.join(dir_path, self.dfn.changes(self["Architecture"], self.FILE_APX))
        if tar is not None:
            tar_path = file_path + ".tar"
            tar.save_as(tar_path)
            self.add_file(tar_path)  # Reference the tar in 'Files' so it travels with us
        self.save_as(file_path, daemon.get().gnupg)


class Upload(Changes):
    """User upload: changes as uploaded by a user (or generated by internal porting)."""

    class Options():
        """
        Uploader options in changes

        >>> config.ROUTES = config.Routes("./test-data/home")   # We need ROUTES setup

        >>> f"{Upload.Options(Base('test-data/changes.options'))}"
        "auto-ports=['jessie-test-unstable', 'squeeze-test-snasphot'], autopkgtest-mode=errfail, internal-apt-priority=543, lintian-mode=errfail, piuparts-mode=errfail"

        >>> f"{Upload.Options(Base('test-data/changes.options_1.0'))}"
        "auto-ports=['jessie-test-unstable', 'squeeze-test-snasphot'], internal-apt-priority=543, lintian-mode=errfail, lintian-mode[i386]=disabled"

        >>> f"{Upload.Options(Base('test-data/changes.options_0.8'))}"
        "auto-ports=['jessie-test-unstable', 'squeeze-test-snasphot'], lintian-mode=ignore"
        """

        class Bool():
            """Boolean option value ('true'/'1' vs 'false'/'0', case-insensitive)."""

            _TRUE = ["true", "1"]
            _FALSE = ["false", "0"]
            _VALID = _TRUE + _FALSE

            def __init__(self, raw_value):
                if raw_value.lower() not in self._VALID:
                    raise util.HTTPBadRequest(f"Bool value must be one of {','.join(self._VALID)}")
                self.value = raw_value.lower() in self._TRUE

        class Int():
            """Integer option value."""

            def __init__(self, raw_value):
                self.value = int(raw_value)

        class Str():
            """String option value."""

            def __init__(self, raw_value):
                self.value = str(raw_value)

        class CSV():
            """Comma-separated list option value."""

            def __init__(self, raw_value):
                self.value = raw_value.split(",")

        #: Magic keyword marking an option line in the changelog entry.
        KEYWORD = "MINI_BUILDD_OPTION"

        #: Deprecated option names; still accepted, converted in ``_set``.
        _OPTIONS_DEPRECATED = {
            "ignore-lintian": Bool,
            "run-lintian": Bool,
        }

        #: Known option names mapped to their value type.
        _OPTIONS = {**{
            "lintian-mode": Str,
            "piuparts-mode": Str,
            "autopkgtest-mode": Str,
            "internal-apt-priority": Int,
            "auto-ports": CSV,
            "deb-build-options": Str,
            "deb-build-profiles": Str,
            "add-depends": Str,
        }, **_OPTIONS_DEPRECATED}

        @classmethod
        def usage(cls):
            """User-facing help text describing all supported upload options."""
            return (
                "(lintian|piuparts|autopkgtest)-mode[<arch>]=<mode>\n"
                f"  {dist.SbuildCheck.usage()}\n"
                "internal-apt-priority[<arch>]=<prio>\n"
                "  Apt prio for our repo packages when building (use 500 to always prefer).\n"
                "auto-ports=<dist>,...\n"
                "  List of distributions (comma-separated) to automatically run ports for after successful install.\n"
                "deb-build-options[<arch>]=<option> <option>...\n"
                "  Contents of DEB_BUILD_OPTIONS environment for building (see https://www.debian.org/doc/debian-policy/ch-source.html#debian-rules-and-deb-build-options).\n"
                "deb-build-profiles[<arch>]=<profile> <profile>...\n"
                "  Contents of DEB_BUILD_PROFILES environment for building (see https://wiki.debian.org/BuildProfileSpec).\n"
                "add-depends[<arch>]=<dep>,<dep>...\n"
                "  List of additional dependencies (comma-separated, like in ``debian/control``) (see ``sbuild --add-depends``).\n"
            )

        @classmethod
        def _get_top_changes(cls, upload_changes):
            """
            Filter only the first block from the changes (changelog) entry

            Upload changes may include multiple version blocks from
            the changelog (internal porting does it, for example),
            but we must only consider values from the top one.
            """
            result = ""
            header_found = False
            for line in upload_changes.get("Changes", "").splitlines(True):
                # A changelog header line starts with ' <source>' -- stop at the second one
                if re.match(r"^ [a-z0-9]+", line):
                    if header_found:
                        break
                    header_found = True
                result += line
            return result

        def _parse_ancient(self):
            """Compat parse support for ancient 'magic' options"""
            def warning(ancient, option):
                LOG.warning("Ancient user option \"%s\" found. Please use option \"%s\" instead (see user manual).", ancient, option)

            ancient_auto_backports = re.search(r"\*\s*MINI_BUILDD:\s*AUTO_BACKPORTS:\s*([^*.\[\]]+)", self._top_changes)
            if ancient_auto_backports:
                warning("AUTO_BACKPORTS", "auto-ports")
                self._set("auto-ports", ancient_auto_backports.group(1))

            ancient_backport_mode = re.search(r"\*\s*MINI_BUILDD:\s*BACKPORT_MODE", self._top_changes)
            if ancient_backport_mode:
                warning("BACKPORT_MODE", "lintian-mode")
                self._set("lintian-mode", "ignore")

        def __init__(self, upload_changes):
            self._top_changes = self._get_top_changes(upload_changes)
            self._options = {}
            # Option lines look like: '* MINI_BUILDD_OPTION: <key>=<value>'
            matches = re.findall(fr"\*\s*{self.KEYWORD}:\s*([^*.]+)=([^*.]+)", self._top_changes)
            for m in matches:
                self._set(m[0], m[1])

            self._parse_ancient()

        def __str__(self):
            return ", ".join(f"{key}={value}" for key, value in sorted(self._options.items()))

        def _set(self, key, raw_value):
            """Validate, convert and store one option (key may carry an '[<arch>]' suffix)."""
            key_part = key.partition("[")
            base_key = key_part[0]
            value = re.sub(r"\s+", "", raw_value)  # Strip all whitespace from the raw value

            # Validity check for key
            if base_key not in self._OPTIONS:
                raise util.HTTPBadRequest(f"Unknown upload option: {key}")

            # Duplicity check
            if key in self._options:
                raise util.HTTPBadRequest(f"Duplicate upload option: {key}")

            # Value conversion check
            converted_value = None
            try:
                converted_value = self._OPTIONS[base_key](value)
            except Exception as e:  # Fix: was BaseException, which also swallowed KeyboardInterrupt/SystemExit
                raise util.HTTPBadRequest(f"Invalid upload option value: {key}=\"{value}\"") from e

            # Handle deprecated options
            if base_key in self._OPTIONS_DEPRECATED:
                oldkey = key
                oldvalue = converted_value.value
                alt = key_part[1] + key_part[2]  # Keep any '[<arch>]' suffix on the new key
                if base_key == "ignore-lintian":
                    key = f"lintian-mode{alt}"
                    converted_value.value = "ignore" if oldvalue else "errfail"
                elif base_key == "run-lintian":
                    key = f"lintian-mode{alt}"
                    converted_value.value = "errfail" if oldvalue else "disabled"
                LOG.warning("User Option: Converted DEPRECATED %s=%s to %s=%s", oldkey, oldvalue, key, converted_value.value)

            self._options[key] = converted_value.value

            LOG.debug("Upload option set: %s=\"%s\"", key, value)

        def get(self, key, alt=None, default=None):
            """Get first existing option value in this order: key[alt], key, default"""
            # Validity check for key
            if key not in self._OPTIONS:
                raise util.HTTPInternal(f"Unknown key used for get(): {key}")

            if alt:
                m_key = f"{key}[{alt}]"
                if m_key in self._options:
                    return self._options.get(m_key, default)
            return self._options.get(key, default)

    MANDATORY = {**Changes.MANDATORY, **{"Changed-By": "Maybe the changelog entry was not finalised?"}}

    def __init__(self, initial_value, **kwargs):
        super().__init__(initial_value, **kwargs)
        self.options = self.Options(self)  # Parse uploader options from the changelog entry
        # (Optionally map and) check distribution string integrity
        self.dist = dist.Dist(util.models().map_distribution(self["Distribution"]))

    def check_mode(self, checker, alt, fallback_mode_int):
        """Get check mode from upload options, with fallback"""
        options_mode_str = self.options.get(f"{checker}-mode", alt=alt)
        fallback_mode_str = dist.SbuildCheck.Mode(fallback_mode_int).name
        return dist.SbuildCheck(checker, options_mode_str if options_mode_str is not None else fallback_mode_str)

    def request_builds(self, repository, distribution, suite_option):
        """Build buildrequest files for all architectures"""
        try:
            with util.fopen(self.dsc_file_path()) as dsc_file:
                dsc = debian.deb822.Dsc(dsc_file)
        except FileNotFoundError as e:
            raise util.HTTPBadRequest(f"Missing DSC file '{self.dfn.dsc()}' (Maybe ``dpkg-buildpackage`` was called with '-b/-B'?)") from e

        # Get all DSC/source package files:
        # - Check md5 against possible pool files.
        # - Add missing from pool (i.e., orig.tar.gz).
        # - make sure all files from dsc are actually available
        files_from_dsc = []
        for f in dsc["Files"]:
            from_pool = None
            for p in glob.glob(os.path.join(repository.mbd_path, "pool", "*", "*", self["Source"], f["name"])):
                if f["md5sum"] != util.Hash(p).md5():
                    raise util.HTTPBadRequest(f"MD5 mismatch in uploaded DSC vs. pool: {f['name']}")
                from_pool = p

            if f["name"] in self.get_files(key="name"):
                files_from_dsc.append(os.path.join(os.path.dirname(self.dsc_file_path()), f["name"]))
            elif from_pool is not None:
                files_from_dsc.append(from_pool)
                LOG.info("%s: File added from pool: %s", self, from_pool)
            else:
                raise util.HTTPBadRequest(f"Missing file '{f['name']}' neither in upload, nor in pool (use '-sa' for uploads with new upstream)")

        buildrequest_dict = {}
        for ao in distribution.architectureoption_set.all():
            path = os.path.join(self.dir_path(), ao.architecture.name)   # Usually: <source>/<version>/<timecode>/source/<arch>/
            os.makedirs(path, exist_ok=True)

            buildrequest = Buildrequest({
                "Distribution": self.dist.get(),
                "Source": self["Source"],
                "Version": self["Version"],
                "Architecture": ao.architecture.name,
                util.CField("Timecode").fullname: self.timecode,
            })

            # Generate config files
            config_path = os.path.join(path, sbuild.CONFIG_DIR)
            os.makedirs(config_path, exist_ok=True)
            config_files = {
                sbuild.CONFIG_APT_SOURCES_LIST: repository.mbd_get_apt_build_sources_list(distribution, suite_option).get(),
                sbuild.CONFIG_APT_PREFERENCES: repository.mbd_get_apt_build_preferences(distribution, suite_option, self.options.get("internal-apt-priority")),
                sbuild.CONFIG_APT_KEYS: repository.mbd_get_apt_keys(distribution),
                sbuild.CONFIG_SSL_CERT: util.http_endpoint().get_certificate(),
                sbuild.CONFIG_CHROOT_SETUP_SCRIPT: distribution.chroot_setup_script,
                sbuild.CONFIG_SBUILDRC_SNIPPET: distribution.sbuildrc_snippet,
            }
            for file_name, content in config_files.items():
                with util.fopen(os.path.join(config_path, file_name), "w") as f:
                    f.write(content)

            # Generate tar: build config + DSC + all source files
            tar = files.Tar()
            tar.add([os.path.join(config_path, file_name) for file_name in config_files], arcdir=sbuild.CONFIG_DIR)
            tar.add([self.dsc_file_path()])
            tar.add(files_from_dsc)

            buildrequest.cset("Upload-To", daemon.get_model().mbd_get_ftp_endpoint().geturl())
            buildrequest.cset("Packager", util.http_endpoint().geturl())
            if ao.build_architecture_all:
                buildrequest.cset("Arch-All", "Yes")
            buildrequest.cset("Build-Dep-Resolver", distribution.get_build_dep_resolver_display())

            buildrequest.cset(sbuild.CONFIG_BLOCKS.name, distribution.sbuild_config_blocks)
            buildrequest.cset(sbuild.SETUP_BLOCKS.name, distribution.sbuild_setup_blocks)

            # Per-arch upload options win over the distribution's defaults
            buildrequest.cset("Deb-Build-Options", self.options.get("deb-build-options", ao.architecture.name, distribution.deb_build_options))
            buildrequest.cset("Deb-Build-Profiles", self.options.get("deb-build-profiles", ao.architecture.name, distribution.deb_build_profiles))
            buildrequest.cset("Add-Depends", self.options.get("add-depends", ao.architecture.name, distribution.add_depends))

            buildrequest.cset("Lintian-Mode", self.check_mode("lintian", ao.architecture.name, distribution.lintian_mode).mode.name)
            buildrequest.cset("Lintian-Extra-Options", distribution.lintian_extra_options)
            buildrequest.cset("Lintian-Warnfail-Options", distribution.lintian_warnfail_options)
            buildrequest.cset("Autopkgtest-Mode", self.check_mode("autopkgtest", ao.architecture.name, distribution.autopkgtest_mode).mode.name)
            buildrequest.cset("Piuparts-Mode", self.check_mode("piuparts", ao.architecture.name, distribution.piuparts_mode).mode.name)

            buildrequest.cset("Buildlog", os.path.join(buildrequest.bkey, self.dfn.buildlog(ao.architecture.name)))

            # Try builders in load order; first successful upload wins for this arch
            builders = util.models().Builders(wake=True, check=True).get(self.dist.codename, ao.architecture.name)
            for _load, builder in sorted(builders.items()):
                try:
                    buildrequest.cset("Builder", builder["url"])
                    buildrequest.cset("Buildlog-Building", config.URIS["builds"]["dir"].url_join(buildrequest.cget("Buildlog"), endpoint=net.ClientEndpoint(builder["url"])))
                    buildrequest.cset("Buildlog-Installed", config.URIS["events"]["dir"].url_join(buildrequest.cget("Buildlog")))
                    buildrequest.save_to(self.get_events_path(ao.architecture.name), tar)
                    buildrequest.upload(net.ClientEndpoint(builder["incoming_url"]))
                    buildrequest_dict[ao.architecture.name] = buildrequest
                    break
                except Exception as e:  # Fix: was BaseException; deliberately broad to try the next builder, but don't eat KeyboardInterrupt/SystemExit
                    util.log_exception(LOG, f"Uploading to '{builder['incoming_url']}' failed", e, level=logging.ERROR)
            if ao.architecture.name not in buildrequest_dict:
                raise util.HTTPUnavailable(f"No working builder found for {ao.architecture.name}/{self.dist.codename}")
        return buildrequest_dict


class Buildrequest(Changes):
    """Changes sent from the packager to a builder, describing one build."""

    FILE_APX = "mini-buildd-buildrequest"

    def __init__(self, initial_value, **kwargs):
        super().__init__(initial_value, **kwargs)
        self.dist = dist.Dist(self["Distribution"])  # Parsed distribution string

    def check_mode(self, checker):
        """Get check mode from checker (lintian, piuparts, autopkgtest)"""
        return dist.SbuildCheck(checker, self.cget(f"{checker.capitalize()}-Mode", dist.SbuildCheck.Mode.DISABLED.name))

    def check_extra_options(self, checker):
        """Get extra options for checker (lintian, piuparts, autopkgtest)"""
        # Fix: docstring was a copy-paste of check_mode's ("Get check mode ...")
        return self.cget(f"{checker.capitalize()}-Extra-Options", "")

    def gen_buildresult(self):
        """Return the existing buildresult changes if present, else generate one from this buildrequest."""
        # NOTE(review): bres_path is a bare file name, so the existence check is relative to the
        # current working directory -- presumably the builder's build dir; confirm against caller.
        bres_path = self.dfn.changes(self["Architecture"], Buildresult.FILE_APX)
        if os.path.exists(bres_path):
            return Buildresult(bres_path)
        return Buildresult({k: self[k] for k in ["Distribution", "Source", "Version", "Architecture"] + [util.CField(cfield).fullname for cfield in ["Timecode", "Builder", "Buildlog", "Buildlog-Installed", "Upload-To"]]})

    def schroot_name(self):
        """
        Schroot name w/o namespace (see ``schroot --list``)

        Must produce same as :func:`~models.chroot.Chroot.mbd_schroot_name`.
        """
        return f"mini-buildd-{self.dist.codename}-{self['Architecture']}"


class Buildresult(Changes):
    """Changes sent back from a builder to the packager, carrying the build's outcome."""

    FILE_APX = "mini-buildd-buildresult"

    def success(self, upload, distribution, ignore_checks):
        """
        Decide whether this build result counts as a success.

        A 'skipped' build always succeeds; a 'successful' build additionally
        needs all configured checks (lintian, piuparts, autopkgtest) to pass.

        A check passes if
        - the actual check run has no errors or warnings
        - the distribution is configured to ignore check results
        - upload says to ignore check results
        - suite is experimental (we deliberately ignore check in that case)
        - warnings are present, but check mode is not warnfail
        """
        arch = self["Architecture"]
        sbuild_status = self.cget("Sbuild-Status")

        if sbuild_status != "successful":
            # Non-successful builds only count when explicitly skipped
            return sbuild_status == "skipped"

        def check_passes(checker, result, fallback_mode):
            # Resolve the effective mode (upload options win over distribution default)
            return upload.check_mode(checker, arch, fallback_mode).check(result, ignore=ignore_checks)

        return (check_passes("lintian", self.cget("Sbuild-Lintian"), distribution.lintian_mode)
                and check_passes("piuparts", self.cget("Sbuild-Piuparts"), distribution.piuparts_mode)
                and check_passes("autopkgtest", self.cget("Sbuild-Autopkgtest"), distribution.autopkgtest_mode))


def incoming2queue(file_path):
    """Classify an incoming changes file and hand it to the matching daemon queue."""
    LOG.debug("Trying to queue incoming: %s", file_path)
    try:
        if Buildresult.match(file_path):
            changes = Buildresult(file_path)
            changes.move_to(changes.get_events_path())
            daemon.get().packager.queue.put(changes)
        elif Buildrequest.match(file_path):
            changes = Buildrequest(file_path)
            changes.move_to(config.ROUTES["builds"].path.join(changes.bkey))
            daemon.get().builder.queue.put(changes)
        else:
            # Neither buildrequest nor buildresult: treat as user upload
            changes = Upload(file_path)
            changes.move_to(changes.get_events_path())
            daemon.get().packager.queue.put(changes)
    except Exception as e:
        # If it at least qualifies as Changes, still try error/event handling for user experience
        changes = Changes(file_path)
        changes.move_to(changes.get_events_path())
        daemon.get().events.log(events.Type.REJECTED, changes, e)
