File: __init__.py

Package: python-dynaconf 3.2.12-1
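"""Built-in loading orchestration for dynaconf.

This module bootstraps the default settings and envvar overrides, dispatches
settings files to the core loaders (PY, YAML, TOML, INI, JSON), executes
``dynaconf_hooks`` modules, enables external loaders such as Vault/Redis and
provides a generic ``write`` helper.
"""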
from __future__ import annotations

import importlib
import os
from contextlib import suppress
from typing import Callable
from typing import TYPE_CHECKING

from dynaconf import constants as ct
from dynaconf import default_settings
from dynaconf.loaders import ini_loader
from dynaconf.loaders import json_loader
from dynaconf.loaders import py_loader
from dynaconf.loaders import toml_loader
from dynaconf.loaders import yaml_loader
from dynaconf.loaders.base import SourceMetadata
from dynaconf.utils import deduplicate
from dynaconf.utils import ensure_a_list
from dynaconf.utils.boxing import DynaBox
from dynaconf.utils.files import get_local_filename
from dynaconf.utils.files import glob
from dynaconf.utils.files import has_magic
from dynaconf.utils.parse_conf import false_values

if TYPE_CHECKING:
    from dynaconf.base import Settings


def default_loader(obj, defaults=None):
    """Initial loader for the initialization process.

    Steps:
    - Load default settings (from static module) + kwargs overrides (together)
    - Load envvar overrides
    """
    # LOAD DEFAULT STATIC + KWARGS OVERRIDES

    defaults = defaults or {}
    default_settings_values = {
        key: value
        for key, value in default_settings.__dict__.items()  # noqa
        if key.isupper()
    }

    all_keys = deduplicate(
        list(defaults.keys()) + list(default_settings_values.keys())
    )

    for key in all_keys:
        if not obj.exists(key):
            value = defaults.get(key, default_settings_values.get(key))
            obj.set(
                key,
                value,
                loader_identifier="default_settings",
                validate=False,
            )

    # LOAD ENVVAR OVERRIDES

    # start dotenv to get default env vars from there
    # check overrides in env vars
    if obj.get("load_dotenv") is True:
        default_settings.start_dotenv(obj)

    # Deal with cases where a custom ENV_SWITCHER is provided
    # Example: the Flask and Django extensions
    env_switcher = defaults.get(
        "ENV_SWITCHER_FOR_DYNACONF", "ENV_FOR_DYNACONF"
    )
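    # Illustration (MYAPP_ENV is a hypothetical name): passing
    # ENV_SWITCHER_FOR_DYNACONF="MYAPP_ENV" in `defaults` makes the active env
    # be read from the MYAPP_ENV envvar instead of ENV_FOR_DYNACONF.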

    for key in all_keys:
        if key not in default_settings_values.keys():
            continue

        env_value = obj.get_environ(
            env_switcher if key == "ENV_FOR_DYNACONF" else key,
            default="_not_found",
        )

        if env_value != "_not_found":
            obj.set(
                key,
                env_value,
                tomlfy=True,
                loader_identifier="envvars_first_load",
            )


def execute_instance_hooks(
    obj: Settings, hook_type: str, hook_functions: list[Callable]
):
    """Execute hooks provided by Setting instance"""
    hook_source = "instance"
    for hook_func in hook_functions:
        _run_hook_function(obj, hook_type, hook_func, hook_source)


def execute_module_hooks(
    hook, obj, env=None, silent=True, key=None, modules=None, files=None
):
    """Execute dynaconf_hooks from module or filepath."""
    if hook not in ["post"]:
        raise ValueError(f"hook {hook} not supported yet.")

    # try to load hooks using python module __name__
    modules = modules or obj._loaded_py_modules
    for loaded_module in modules:
        hook_module_name = ".".join(
            loaded_module.split(".")[:-1] + ["dynaconf_hooks"]
        )
        try:
            hook_module = importlib.import_module(hook_module_name)
        except (ImportError, TypeError):
            # There was no hook on the same path as a python module
            continue
        else:
            _run_hook_module(
                hook_type=hook,
                hook_module=hook_module,
                obj=obj,
                key=key,
            )

    # Try to load from python filename path
    files = files or obj._loaded_files
    for loaded_file in files:
        hook_file = os.path.join(
            os.path.dirname(loaded_file), "dynaconf_hooks.py"
        )
        if not os.path.exists(hook_file):
            # Skip if the file doesn't exist.
            # Faster than attempting to import.
            continue
        hook_module = py_loader.import_from_filename(
            obj, hook_file, silent=silent
        )
        _run_hook_module(
            hook_type=hook,
            hook_module=hook_module,
            obj=obj,
            key=key,
        )


# alias
execute_hooks = execute_module_hooks
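
# Sketch of a `dynaconf_hooks.py` file that `execute_module_hooks` would pick
# up next to a loaded settings file or module (names/values are illustrative):
#
#     def post(settings):
#         # may accept the (cloned) settings object or no argument at all;
#         # the returned dict is applied to the settings, honoring an
#         # optional "dynaconf_merge" key.
#         return {"COMPUTED": settings.get("BASE", 0) + 1, "dynaconf_merge": True}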


def _get_unique_hook_id(hook_func, hook_source):
    """get unique identifier for a hook function.
    in most of cases this will be the function name@source_file
    however, if the function is a lambda, it will be a hash of the code object.
    because lambda functions are not hashable itself and we can't rely on its id.
    """
    hook_unique_id = hook_func.__name__
    if hook_unique_id == "<lambda>":
        frame_info = getattr(hook_func, "__code__", None)
        if frame_info:
            hook_unique_id = f"lambda_{hash(frame_info.co_code)}"
        else:
            hook_unique_id = f"lambda_{id(hook_func)}"
    return f"{hook_unique_id}@{hook_source}"


def _run_hook_module(hook_type, hook_module, obj, key=""):
    """
    Run a hook function from hook_module.

    Given a @hook_type, a @hook_module and a settings @obj, load the function
    and execute it if found.
    """

    # check errors
    if hook_module and getattr(hook_module, "_error", False):
        if not isinstance(hook_module._error, FileNotFoundError):
            raise hook_module._error

    # execute hook
    hook_source = hook_module.__file__
    hook_func = getattr(hook_module, hook_type, None)
    if hook_func:
        identifier = _get_unique_hook_id(hook_func, hook_source)
        if hook_type not in obj._loaded_hooks.get(identifier, {}):
            _run_hook_function(obj, hook_type, hook_func, hook_source, key)


def _run_hook_function(
    obj: Settings,
    hook_type: str,
    hook_func: Callable,
    hook_source: str = "default",
    key: str = "",
):
    """
    Run a hook function:

    It execute @hook_func, update the results into settings @obj and
    add it to _loaded_hook registry ([@hook_source][@hook_type])
    """
    # if the function has a _dynaconf_hook_source attribute, use it as
    # hook_source
    hook_source = getattr(hook_func, "_dynaconf_hook_source", hook_source)

    # optional settings argument
    try:
        hook_dict = hook_func(obj.dynaconf.clone())
    except TypeError:
        hook_dict = hook_func()

    # mark as called so executors such as `load_file` can avoid calling it again
    with suppress(AttributeError, TypeError):
        # The callable may not be writable; the caveat is that it will be
        # called again on reload. This should not be a problem because the
        # function is expected to be idempotent, and the documentation warns
        # about this behavior.
        hook_func._called = True

    identifier = _get_unique_hook_id(hook_func, hook_source)

    if hook_dict:
        # update obj settings
        merge = hook_dict.pop(
            "dynaconf_merge", hook_dict.pop("DYNACONF_MERGE", False)
        )
        if key and key in hook_dict:
            obj.set(
                key,
                hook_dict[key],
                tomlfy=False,
                merge=merge,
                loader_identifier=identifier,
            )
        elif not key:
            obj.update(
                hook_dict,
                tomlfy=False,
                merge=merge,
                loader_identifier=identifier,
            )

    # add to registry
    obj._loaded_hooks[identifier][hook_type] = hook_dict


def settings_loader(
    obj,
    settings_module=None,
    env=None,
    silent=True,
    key=None,
    filename=None,
    validate=False,
    identifier="settings_loader",
):
    """Loads from defined settings module

    :param obj: A dynaconf instance
    :param settings_module: A path or a list of paths e.g settings.toml
    :param env: Env to look for data defaults: development
    :param silent: Boolean to raise loading errors
    :param key: Load a single key if provided
    :param filename: optional filename to override the settings_module
    :param validate: If True validate the loaded data
    :param identifier: A string or SourceMetadata to identify the loader
    """
    if filename is None:
        settings_module = settings_module or obj.settings_module
        if not settings_module:  # pragma: no cover
            return
        files = ensure_a_list(settings_module)
    else:
        files = ensure_a_list(filename)

    files.extend(ensure_a_list(obj.get("SECRETS_FOR_DYNACONF", None)))

    found_files = []
    modules_names = []
    for item in files:
        item = str(item)  # Ensure str in case a LocalPath/Path is passed.
        p_root = obj._root_path or (
            os.path.dirname(found_files[0]) if found_files else None
        )
        if has_magic(item):
            # handle possible globs inside files list
            # like ["path/*.yaml", "path/ABC?.yaml"]
            globedfiles = glob(item, root_dir=p_root)
            for globedfile in globedfiles:
                # use obj.find_file logic to handle skipped files
                found = obj.find_file(globedfile, project_root=p_root)
                if found:
                    found_files.append(found)
        elif item.endswith(ct.ALL_EXTENSIONS + (".py",)):
            found = obj.find_file(item, project_root=p_root)
            if found:
                found_files.append(found)
        else:
            # a bare python module name w/o extension
            modules_names.append(item)

    enabled_core_loaders = [
        item.upper() for item in obj.get("CORE_LOADERS_FOR_DYNACONF") or []
    ]
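    # CORE_LOADERS_FOR_DYNACONF is expected to be a list of loader names such
    # as ["YAML", "TOML", "INI", "JSON", "PY"]; names not listed are skipped
    # in the dispatch below.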

    # also queue the `.local.` variant of each found file
    # (e.g. `settings.toml` -> `settings.local.toml`)
    found_files.extend(
        [
            get_local_filename(item)
            for item in found_files
            if ".local." not in str(item)
        ]
    )

    for mod_file in modules_names + found_files:
        # settings_module may list multiple files: settings.py,settings.yaml,...

        # Cascade all loaders
        loaders = [
            {"ext": ct.YAML_EXTENSIONS, "name": "YAML", "loader": yaml_loader},
            {"ext": ct.TOML_EXTENSIONS, "name": "TOML", "loader": toml_loader},
            {"ext": ct.INI_EXTENSIONS, "name": "INI", "loader": ini_loader},
            {"ext": ct.JSON_EXTENSIONS, "name": "JSON", "loader": json_loader},
        ]

        for loader in loaders:
            if loader["name"] not in enabled_core_loaders:
                continue

            if mod_file.endswith(loader["ext"]):
                if isinstance(identifier, str):
                    # ensure the identifier is always the loader name
                    identifier = loader["name"].lower()
                loader["loader"].load(
                    obj,
                    filename=mod_file,
                    env=env,
                    silent=silent,
                    key=key,
                    validate=validate,
                    identifier=identifier,
                )
                continue

        if mod_file.endswith(ct.ALL_EXTENSIONS):
            continue

        if "PY" not in enabled_core_loaders:
            # pyloader is disabled
            continue

        # must be a Python file or module
        # load from the default module, e.g. settings.py or .secrets.py, if it exists
        py_loader.load(
            obj, mod_file, key=key, validate=validate, identifier=identifier
        )

        # load from the current env, e.g. development_settings.py
        # handling the case where env is a comma-separated string
        env = env or obj.current_env
        if env and isinstance(env, str):
            for env_name in env.split(","):
                load_from_env_named_file(
                    obj, env_name, key, validate, identifier, mod_file
                )


def load_from_env_named_file(obj, env, key, validate, identifier, mod_file):
    """Load from env named file e.g: development_settings.py"""
    if mod_file.endswith(".py"):
        if ".secrets.py" == mod_file:
            tmpl = ".{0}_{1}{2}"
            mod_file = "secrets.py"
        else:
            tmpl = "{0}_{1}{2}"

        dirname = os.path.dirname(mod_file)
        filename, extension = os.path.splitext(os.path.basename(mod_file))
        new_filename = tmpl.format(env.lower(), filename, extension)
        env_mod_file = os.path.join(dirname, new_filename)
        global_filename = tmpl.format("global", filename, extension)
        global_mod_file = os.path.join(dirname, global_filename)
    else:
        parts = mod_file.rsplit(".", 1)
        if len(parts) > 1:
            head, tail = parts
        else:
            head, tail = None, parts[0]
        tail = env_mod_file = f"{env.lower()}_{tail}"

        if head:
            env_mod_file = f"{head}.{tail}"
            global_mod_file = f"{head}.global_{tail}"
        else:
            env_mod_file = tail
            global_mod_file = f"global_{tail}"

    source_metadata = SourceMetadata(
        loader="py",
        identifier=identifier,
        env=env,
    )
    py_loader.load(
        obj,
        env_mod_file,
        identifier=source_metadata,
        silent=True,
        key=key,
        validate=validate,
    )

    # load from global_settings.py
    py_loader.load(
        obj,
        global_mod_file,
        identifier="py_global",
        silent=True,
        key=key,
        validate=validate,
    )


def enable_external_loaders(obj):
    """Enable external service loaders like `VAULT_` and `REDIS_`
    looks forenv variables like `REDIS_ENABLED_FOR_DYNACONF`
    """
    for name, loader in ct.EXTERNAL_LOADERS.items():
        enabled = getattr(obj, f"{name.upper()}_ENABLED_FOR_DYNACONF", False)
        if (
            enabled
            and enabled not in false_values
            and loader not in obj.LOADERS_FOR_DYNACONF
        ):  # noqa
            obj.LOADERS_FOR_DYNACONF.insert(0, loader)


def write(filename, data, env=None, merge=False):
    """Writes `data` to `filename` infers format by file extension."""
    loader_name = f"{filename.rpartition('.')[-1]}_loader"
    loader = globals().get(loader_name)
    if not loader:
        raise OSError(f"{loader_name} cannot be found.")

    data = DynaBox(data, box_settings={}).to_dict()
    if loader is not py_loader and env and env not in data:
        data = {env: data}

    loader.write(filename, data, merge=merge)
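

# Usage sketch for `write` (filename and values are illustrative):
#   write("settings.toml", {"key": "value"}, env="development")
#   # resolves `toml_loader` from the extension and delegates to
#   # toml_loader.write("settings.toml", {"development": {"key": "value"}}, merge=False)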