from __future__ import annotations

import importlib
import os
from contextlib import suppress
from typing import Callable
from typing import TYPE_CHECKING

from dynaconf import constants as ct
from dynaconf import default_settings
from dynaconf.loaders import ini_loader
from dynaconf.loaders import json_loader
from dynaconf.loaders import py_loader
from dynaconf.loaders import toml_loader
from dynaconf.loaders import yaml_loader
from dynaconf.loaders.base import SourceMetadata
from dynaconf.utils import deduplicate
from dynaconf.utils import ensure_a_list
from dynaconf.utils.boxing import DynaBox
from dynaconf.utils.files import get_local_filename
from dynaconf.utils.files import glob
from dynaconf.utils.files import has_magic
from dynaconf.utils.parse_conf import false_values

if TYPE_CHECKING:
    from dynaconf.base import Settings


def default_loader(obj, defaults=None):
    """Initial loader for the initialization process.

    Steps:
    - Load default settings (from static module) + kwargs overrides (together)
    - Load envvar overrides
    """
    # LOAD DEFAULT STATIC + KWARGS OVERRIDES

    defaults = defaults or {}
    static_defaults = {
        name: val
        for name, val in default_settings.__dict__.items()  # noqa
        if name.isupper()
    }

    candidate_keys = deduplicate(list(defaults) + list(static_defaults))

    for name in candidate_keys:
        if obj.exists(name):
            continue
        # kwargs-provided defaults win over the static module defaults
        obj.set(
            name,
            defaults.get(name, static_defaults.get(name)),
            loader_identifier="default_settings",
            validate=False,
        )

    # LOAD ENVVAR OVERRIDES

    # dotenv may populate os.environ before we read overrides from it
    if obj.get("load_dotenv") is True:
        default_settings.start_dotenv(obj)

    # A custom ENV switcher may be provided
    # Example: Flask and Django Extensions
    switcher_key = defaults.get(
        "ENV_SWITCHER_FOR_DYNACONF", "ENV_FOR_DYNACONF"
    )

    missing = "_not_found"
    for name in candidate_keys:
        # only keys known to the static defaults accept envvar overrides
        if name not in static_defaults:
            continue

        lookup_name = switcher_key if name == "ENV_FOR_DYNACONF" else name
        env_value = obj.get_environ(lookup_name, default=missing)

        if env_value == missing:
            continue
        obj.set(
            name,
            env_value,
            tomlfy=True,
            loader_identifier="envvars_first_load",
        )


def execute_instance_hooks(
    obj: Settings, hook_type: str, hook_functions: list[Callable]
):
    """Run every hook callable registered directly on the Settings instance."""
    for func in hook_functions:
        # hooks registered on the instance are tagged with the "instance" source
        _run_hook_function(obj, hook_type, func, "instance")


def execute_module_hooks(
    hook, obj, env=None, silent=True, key=None, modules=None, files=None
):
    """Execute dynaconf_hooks from module or filepath."""
    if hook not in ["post"]:
        raise ValueError(f"hook {hook} not supported yet.")

    # First pass: look for a `dynaconf_hooks` module sitting next to each
    # python module that was loaded (same package).
    for module_name in modules or obj._loaded_py_modules:
        package_parts = module_name.split(".")[:-1]
        candidate = ".".join(package_parts + ["dynaconf_hooks"])
        try:
            imported = importlib.import_module(candidate)
        except (ImportError, TypeError):
            # no hook module lives next to this python module
            continue
        _run_hook_module(
            hook_type=hook,
            hook_module=imported,
            obj=obj,
            key=key,
        )

    # Second pass: look for a `dynaconf_hooks.py` file sitting next to each
    # loaded settings file.
    for settings_file in files or obj._loaded_files:
        candidate_path = os.path.join(
            os.path.dirname(settings_file), "dynaconf_hooks.py"
        )
        # checking existence first is cheaper than attempting the import
        if not os.path.exists(candidate_path):
            continue
        imported = py_loader.import_from_filename(
            obj, candidate_path, silent=silent
        )
        _run_hook_module(
            hook_type=hook,
            hook_module=imported,
            obj=obj,
            key=key,
        )


# Backwards-compatible alias: `execute_hooks` is the historical public name
# for the module/file based hook executor.
execute_hooks = execute_module_hooks


def _get_unique_hook_id(hook_func, hook_source):
    """get unique identifier for a hook function.
    in most of cases this will be the function name@source_file
    however, if the function is a lambda, it will be a hash of the code object.
    because lambda functions are not hashable itself and we can't rely on its id.
    """
    hook_unique_id = hook_func.__name__
    if hook_unique_id == "<lambda>":
        frame_info = getattr(hook_func, "__code__", None)
        if frame_info:
            hook_unique_id = f"lambda_{hash(frame_info.co_code)}"
        else:
            hook_unique_id = f"lambda_{id(hook_func)}"
    return f"{hook_unique_id}@{hook_source}"


def _run_hook_module(hook_type, hook_module, obj, key=""):
    """
    Run a hook function found inside @hook_module.

    Re-raises any import-time error recorded on the module (a missing file
    is tolerated), then executes the function named @hook_type on the
    settings @obj unless it already ran for this identifier.
    """
    # surface import-time errors, except a plain "file not found"
    module_error = hook_module and getattr(hook_module, "_error", False)
    if module_error and not isinstance(module_error, FileNotFoundError):
        raise module_error

    hook_source = hook_module.__file__
    hook_func = getattr(hook_module, hook_type, None)
    if not hook_func:
        return

    identifier = _get_unique_hook_id(hook_func, hook_source)
    # skip hooks already recorded in the registry for this type
    if hook_type not in obj._loaded_hooks.get(identifier, {}):
        _run_hook_function(obj, hook_type, hook_func, hook_source, key)


def _run_hook_function(
    obj: Settings,
    hook_type: str,
    hook_func: Callable,
    hook_source: str = "default",
    key: str = "",
):
    """
    Run a single hook callable.

    Executes @hook_func, applies its returned mapping to the settings @obj
    and records the result in the ``_loaded_hooks`` registry under
    ``[identifier][hook_type]``.
    """
    # a hook may carry its own source marker, which wins over the caller's
    hook_source = getattr(hook_func, "_dynaconf_hook_source", hook_source)

    # the settings argument is optional: retry without it on TypeError
    try:
        hook_dict = hook_func(obj.dynaconf.clone())
    except TypeError:
        hook_dict = hook_func()

    # Flag the function as already-called so executors such as `load_file`
    # can skip it. Some callables reject attribute writes; those hooks may
    # run again on reload, which the documentation warns about and which is
    # harmless for idempotent hooks.
    with suppress(AttributeError, TypeError):
        hook_func._called = True

    identifier = _get_unique_hook_id(hook_func, hook_source)

    if hook_dict:
        # both merge markers are always popped off the result;
        # the lowercase one takes precedence when both are present
        fallback_merge = hook_dict.pop("DYNACONF_MERGE", False)
        merge = hook_dict.pop("dynaconf_merge", fallback_merge)
        if key and key in hook_dict:
            # single-key mode: apply only the requested key
            obj.set(
                key,
                hook_dict[key],
                tomlfy=False,
                merge=merge,
                loader_identifier=identifier,
            )
        elif not key:
            # full mode: apply the whole returned mapping
            obj.update(
                hook_dict,
                tomlfy=False,
                merge=merge,
                loader_identifier=identifier,
            )

    # record the (possibly empty) result in the registry
    obj._loaded_hooks[identifier][hook_type] = hook_dict


def settings_loader(
    obj,
    settings_module=None,
    env=None,
    silent=True,
    key=None,
    filename=None,
    validate=False,
    identifier="settings_loader",
):
    """Loads from defined settings module

    Resolves the configured settings sources into concrete files (expanding
    globs and adding ``.local.`` variants) plus bare module names, then
    cascades every enabled core loader (YAML/TOML/INI/JSON and PY) over them.

    :param obj: A dynaconf instance
    :param settings_module: A path or a list of paths e.g settings.toml
    :param env: Env to look for data defaults: development
    :param silent: Boolean to raise loading errors
    :param key: Load a single key if provided
    :param filename: optional filename to override the settings_module
    :param validate: If True validate the loaded data
    :param identifier: A string or SourceMetadata to identify the loader
    """
    # `filename` (when given) overrides the settings_module list entirely
    if filename is None:
        settings_module = settings_module or obj.settings_module
        if not settings_module:  # pragma: no cover
            return
        files = ensure_a_list(settings_module)
    else:
        files = ensure_a_list(filename)

    # secrets files are appended so they load last and can override
    files.extend(ensure_a_list(obj.get("SECRETS_FOR_DYNACONF", None)))

    found_files = []
    modules_names = []
    for item in files:
        item = str(item)  # Ensure str in case of LocalPath/Path is passed.
        # fall back to the directory of the first found file as project root
        p_root = obj._root_path or (
            os.path.dirname(found_files[0]) if found_files else None
        )
        if has_magic(item):
            # handle possible globs inside files list
            # like ["path/*.yaml", "path/ABC?.yaml"]
            globedfiles = glob(item, root_dir=p_root)
            for globedfile in globedfiles:
                # use object.find_file logic to handle skip files
                found = obj.find_file(globedfile, project_root=p_root)
                if found:
                    found_files.append(found)
        elif item.endswith(ct.ALL_EXTENSIONS + (".py",)):
            found = obj.find_file(item, project_root=p_root)
            if found:
                found_files.append(found)
        else:
            # a bare python module name w/o extension
            modules_names.append(item)

    enabled_core_loaders = [
        item.upper() for item in obj.get("CORE_LOADERS_FOR_DYNACONF") or []
    ]

    # add `.local.` to found_files list to search for local files.
    found_files.extend(
        [
            get_local_filename(item)
            for item in found_files
            if ".local." not in str(item)
        ]
    )

    for mod_file in modules_names + found_files:
        # can be set to multiple files settings.py,settings.yaml,...

        # Cascade all loaders
        loaders = [
            {"ext": ct.YAML_EXTENSIONS, "name": "YAML", "loader": yaml_loader},
            {"ext": ct.TOML_EXTENSIONS, "name": "TOML", "loader": toml_loader},
            {"ext": ct.INI_EXTENSIONS, "name": "INI", "loader": ini_loader},
            {"ext": ct.JSON_EXTENSIONS, "name": "JSON", "loader": json_loader},
        ]

        for loader in loaders:
            if loader["name"] not in enabled_core_loaders:
                continue

            if mod_file.endswith(loader["ext"]):
                if isinstance(identifier, str):
                    # ensure it is always loader name
                    # NOTE(review): this rebinding persists into later
                    # iterations of the outer file loop, so a caller-supplied
                    # str identifier is discarded after the first match —
                    # presumably intended; confirm.
                    identifier = loader["name"].lower()
                loader["loader"].load(
                    obj,
                    filename=mod_file,
                    env=env,
                    silent=silent,
                    key=key,
                    validate=validate,
                    identifier=identifier,
                )
                # continues the inner loaders loop; extensions are disjoint
                # so at most one loader actually matches a given file
                continue

        # non-python data files are fully handled above; skip py loading
        if mod_file.endswith(ct.ALL_EXTENSIONS):
            continue

        if "PY" not in enabled_core_loaders:
            # pyloader is disabled
            continue

        # must be Python file or module
        # load from default defined module settings.py or .secrets.py if exists
        py_loader.load(
            obj, mod_file, key=key, validate=validate, identifier=identifier
        )

        # load from the current env e.g: development_settings.py
        # counting on the case where env is a comma separated string
        env = env or obj.current_env
        if env and isinstance(env, str):
            for env_name in env.split(","):
                load_from_env_named_file(
                    obj, env_name, key, validate, identifier, mod_file
                )


def load_from_env_named_file(obj, env, key, validate, identifier, mod_file):
    """Load env-named variants of a python settings file/module.

    For ``settings.py`` and env ``development`` this attempts
    ``development_settings.py`` then ``global_settings.py``; for a dotted
    module name like ``pkg.settings`` it attempts ``pkg.development_settings``
    then ``pkg.global_settings``. Secrets files keep their leading dot
    (``.development_secrets.py``).

    :param obj: A dynaconf instance
    :param env: Env name e.g: development
    :param key: Load a single key if provided
    :param validate: If True validate the loaded data
    :param identifier: A string to identify the loader
    :param mod_file: Base settings file path or bare module name
    """
    if mod_file.endswith(".py"):
        # BUGFIX: mod_file is normally a full path (produced by find_file),
        # so a strict `".secrets.py" == mod_file` comparison never matched
        # and the hidden-file template branch was dead; use substring match.
        if ".secrets.py" in mod_file:
            tmpl = ".{0}_{1}{2}"  # keep secrets variants as dotfiles
            mod_file = "secrets.py"
        else:
            tmpl = "{0}_{1}{2}"

        dirname = os.path.dirname(mod_file)
        filename, extension = os.path.splitext(os.path.basename(mod_file))
        new_filename = tmpl.format(env.lower(), filename, extension)
        env_mod_file = os.path.join(dirname, new_filename)
        global_filename = tmpl.format("global", filename, extension)
        global_mod_file = os.path.join(dirname, global_filename)
    else:
        # bare python module name, possibly dotted e.g: pkg.settings
        head, _, tail = mod_file.rpartition(".")
        env_tail = f"{env.lower()}_{tail}"
        # BUGFIX: the global variant must be env-independent ("global_" on
        # the original tail), mirroring the .py branch which substitutes
        # "global" for the env name; previously the env prefix leaked in.
        if head:
            env_mod_file = f"{head}.{env_tail}"
            global_mod_file = f"{head}.global_{tail}"
        else:
            env_mod_file = env_tail
            global_mod_file = f"global_{tail}"

    source_metadata = SourceMetadata(
        loader="py",
        identifier=identifier,
        env=env,
    )
    # load from the env-specific file e.g: development_settings.py
    py_loader.load(
        obj,
        env_mod_file,
        identifier=source_metadata,
        silent=True,
        key=key,
        validate=validate,
    )

    # load from global_settings.py
    py_loader.load(
        obj,
        global_mod_file,
        identifier="py_global",
        silent=True,
        key=key,
        validate=validate,
    )


def enable_external_loaders(obj):
    """Enable external service loaders such as ``VAULT_`` and ``REDIS_``.

    Looks for env variables like ``REDIS_ENABLED_FOR_DYNACONF`` and, when
    truthy, prepends the matching loader to ``obj.LOADERS_FOR_DYNACONF``.
    """
    for service, loader_path in ct.EXTERNAL_LOADERS.items():
        flag = getattr(obj, f"{service.upper()}_ENABLED_FOR_DYNACONF", False)
        # a truthy flag that is not one of the "false" strings enables it
        if not flag or flag in false_values:
            continue
        # avoid inserting the same loader twice
        if loader_path in obj.LOADERS_FOR_DYNACONF:
            continue
        obj.LOADERS_FOR_DYNACONF.insert(0, loader_path)


def write(filename, data, env=None, merge=False):
    """Writes `data` to `filename` infers format by file extension."""
    # pick the module-level loader matching the file extension
    extension = filename.rpartition(".")[-1]
    loader_name = f"{extension}_loader"
    loader = globals().get(loader_name)
    if loader is None:
        raise OSError(f"{loader_name} cannot be found.")

    payload = DynaBox(data, box_settings={}).to_dict()
    # py files are written flat; other formats nest data under the env key
    if env and env not in payload and loader is not py_loader:
        payload = {env: payload}

    loader.write(filename, payload, merge=merge)
