File: test_log.py

# This file is part of cloud-init. See LICENSE file for license information.

"""Tests for cloudinit.log"""

import datetime
import io
import logging
import time
from typing import cast

import pytest

from cloudinit import lifecycle, util
from cloudinit.analyze.dump import CLOUD_INIT_ASCTIME_FMT
from cloudinit.log import loggers


@pytest.fixture
def ci_logs():
    return io.StringIO()


@pytest.fixture
def log(ci_logs):
    # set up a logger like cloud-init does in setup_logging, but instead
    # of sys.stderr, we'll plug in a StringIO() object so we can see
    # what gets logged
    logging.Formatter.converter = time.gmtime
    ci_root = logging.getLogger()
    console = logging.StreamHandler(ci_logs)
    console.setFormatter(logging.Formatter(loggers.DEFAULT_LOG_FORMAT))
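    # DEFAULT_LOG_FORMAT renders lines such as:
    #   2017-08-23 14:19:43,069 - test_log.py[ERROR]: Test message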
    console.setLevel(logging.DEBUG)
    ci_root.addHandler(console)
    ci_root.setLevel(logging.DEBUG)
    LOG = logging.getLogger("test_cloudinit_logger")
    return LOG


class TestCloudInitLogger:

    def test_logger_uses_gmtime(self, log, ci_logs):
        """Test that log message have timestamp in UTC (gmtime)"""

        # Log a message, extract the timestamp from the log entry,
        # convert it to a datetime, and compare it to a UTC timestamp
        # taken before and after the logged message.

        # Due to loss of precision in the LOG timestamp, subtract and add
        # time to the utc stamps for comparison
        #
        # utc_before: 2017-08-23 14:19:42.569299
        # parsed dt : 2017-08-23 14:19:43.069000
        # utc_after : 2017-08-23 14:19:43.570064

        def remove_tz(_dt: datetime.datetime) -> datetime.datetime:
            """
            Remove the timezone from an aware datetime without converting
            the date and time data
            """
            return _dt.replace(tzinfo=None)

        utc_before = remove_tz(
            datetime.datetime.now(datetime.timezone.utc)
            - datetime.timedelta(0, 0.5)
        )
        log.error("Test message")
        utc_after = remove_tz(
            datetime.datetime.now(datetime.timezone.utc)
            + datetime.timedelta(0, 0.5)
        )

        # extract timestamp from log:
        # 2017-08-23 14:19:43,069 - test_log.py[ERROR]: Test message
        logstr = ci_logs.getvalue().splitlines()[0]
        timestampstr = logstr.split(" - ")[0]
        parsed_dt = datetime.datetime.strptime(
            timestampstr, CLOUD_INIT_ASCTIME_FMT
        )

        assert utc_before < parsed_dt < utc_after


class TestDeprecatedLogs:
    def test_deprecated_log_level(self, caplog):
        logger = cast(loggers.CustomLoggerType, logging.getLogger())
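        # deprecated() is a custom logging method added by
        # cloudinit.log.loggers; the cast to CustomLoggerType keeps type
        # checkers aware of the extra methods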
        logger.deprecated("deprecated message")
        assert "DEPRECATED" == caplog.records[0].levelname
        assert "deprecated message" in caplog.text

    def test_trace_log_level(self, caplog):
        logger = cast(loggers.CustomLoggerType, logging.getLogger())
        logger.setLevel(logging.NOTSET)
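        # TRACE sits below DEBUG, so the root logger must be opened up to
        # NOTSET or the record would be filtered before reaching caplog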
        logger.trace("trace message")
        assert "TRACE" == caplog.records[0].levelname
        assert "trace message" in caplog.text

    @pytest.mark.parametrize(
        "expected_log_level, deprecation_info_boundary",
        (
            pytest.param(
                "DEPRECATED",
                "19.2",
                id="test_same_deprecation_info_boundary_is_deprecated_level",
            ),
            pytest.param(
                "INFO",
                "19.1",
                id="test_lower_deprecation_info_boundary_is_info_level",
            ),
        ),
    )
    def test_deprecate_log_level_based_on_features(
        self,
        expected_log_level,
        deprecation_info_boundary,
        caplog,
        mocker,
        clear_deprecation_log,
    ):
        """Deprecation log level depends on key deprecation_version

        When DEPRECATION_INFO_BOUNDARY is set to a version number, and a key
        has a deprecated_version with a version greater than the boundary
        the log level is INFO instead of DEPRECATED. If
        DEPRECATION_INFO_BOUNDARY is set to the default, "devel", all
        deprecated keys are logged at level DEPRECATED.
        """
        mocker.patch.object(
            util.features,
            "DEPRECATION_INFO_BOUNDARY",
            deprecation_info_boundary,
        )
        lifecycle.deprecate(
            deprecated="some key",
            deprecated_version="19.2",
            extra_message="dont use it",
        )
        assert expected_log_level == caplog.records[0].levelname
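        # deprecated_version 19.2 plus the default schedule (five releases)
        # produces the "removed in 24.2" wording asserted here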
        assert (
            "some key is deprecated in 19.2 and scheduled to be removed in"
            " 24.2" in caplog.text
        )

    def test_log_deduplication(self, caplog):
        loggers.define_extra_loggers()
        lifecycle.deprecate(
            deprecated="stuff",
            deprecated_version="19.1",
            extra_message=":)",
        )
        lifecycle.deprecate(
            deprecated="stuff",
            deprecated_version="19.1",
            extra_message=":)",
        )
        lifecycle.deprecate(
            deprecated="stuff",
            deprecated_version="19.1",
            extra_message=":)",
            schedule=6,
        )
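        # the second, identical call is deduplicated; the third differs only
        # in schedule, which changes the message, so two records are expected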
        assert 2 == len(caplog.records)


def test_logger_prints_to_stderr(capsys):
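    # setup_basic_logging() directs root-logger output to stderr, so the
    # warning is expected in captured stderr rather than stdout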
    message = "to stdout"
    loggers.setup_basic_logging()
    logging.getLogger().warning(message)
    assert message in capsys.readouterr().err