File: builder.py

package info (click to toggle)
sphinx-needs 5.1.0+dfsg-6
  • links: PTS, VCS
  • area: main
  • in suites: sid
  • size: 12,108 kB
  • sloc: python: 21,148; javascript: 187; makefile: 95; sh: 29; xml: 10
file content (259 lines) | stat: -rw-r--r-- 8,608 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
from __future__ import annotations

import os
from collections.abc import Iterable, Sequence

from docutils import nodes
from sphinx.application import Sphinx
from sphinx.builders import Builder

from sphinx_needs.config import NeedsSphinxConfig
from sphinx_needs.data import SphinxNeedsData
from sphinx_needs.logging import get_logger
from sphinx_needs.needsfile import NeedsList

LOGGER = get_logger(__name__)


class NeedsBuilder(Builder):
    """Dump all needs of the project into a single JSON file.

    The needs are filtered by the ``needs_builder_filter`` config option
    (if set) and written to ``needs.json`` in the output folder, or to the
    file named by the ``needs_file`` config option if that is set.

    Note this builder normally skips the write phase completely,
    where all documents would be post-transformed, to improve performance.
    All need data is assumed to be collected during the read phase;
    post-processing happens in :meth:`finish`.
    """

    name = "needs"
    format = "needs"
    file_suffix = ".txt"
    links_suffix = None

    def get_outdated_docs(self) -> Iterable[str]:
        # Nothing is ever outdated: no per-document output is produced.
        return []

    def write(
        self,
        build_docnames: Iterable[str] | None,
        updated_docnames: Sequence[str],
        method: str = "update",
    ) -> None:
        # Skip the whole write phase; all work is done in finish().
        return

    def finish(self) -> None:
        # Imported lazily, mirroring the original module layout
        # (presumably to avoid an import cycle — TODO confirm).
        from sphinx_needs.filter_common import filter_needs_view

        env = self.env
        needs_config = NeedsSphinxConfig(env.config)
        version = getattr(env.config, "version", "unset")
        needs_list = NeedsList(env.config, self.outdir, self.srcdir)

        if needs_config.file:
            # Merge into the explicitly configured needs file.
            needs_list.load_json(needs_config.file)
        else:
            # check if needs.json file exists in conf.py directory
            candidate = os.path.join(self.srcdir, "needs.json")
            if os.path.exists(candidate):
                LOGGER.info(
                    "needs.json found, but will not be used because needs_file not configured."
                )

        # Remove everything already stored for the current version: needs may
        # have been deleted from the documentation, and without this wipe the
        # stale entries would survive in the exported list.
        needs_list.wipe_version(version)

        filtered_needs = filter_needs_view(
            SphinxNeedsData(env).get_needs_view(),
            needs_config,
            needs_config.builder_filter,
            append_warning="(from need_builder_filter)",
        )
        for need in filtered_needs:
            needs_list.add_need(version, need)

        try:
            needs_list.write_json()
        except Exception as e:
            LOGGER.error(f"Error during writing json file: {e}")
        else:
            LOGGER.info("Needs successfully exported")

    def get_target_uri(self, _docname: str, _typ: str | None = None) -> str:
        # only needed if the write phase is run
        return ""

    def prepare_writing(self, _docnames: set[str]) -> None:
        # only needed if the write phase is run
        pass

    def write_doc(self, docname: str, doctree: nodes.document) -> None:
        # only needed if the write phase is run
        pass

    def write_doc_serialized(self, _docname: str, _doctree: nodes.document) -> None:
        # only needed if the write phase is run
        pass

    def cleanup(self) -> None:
        # only needed if the write phase is run
        pass


def build_needs_json(app: Sphinx, _exception: Exception) -> None:
    """Build-finished event callback: export ``needs.json`` when the
    ``needs_build_json`` config option is enabled, regardless of the
    builder actually in use.
    """
    env = app.env

    if not NeedsSphinxConfig(env.config).build_json:
        return

    if isinstance(app.builder, NeedsBuilder):
        # Do not create an additional needs.json, if builder is already "needs".
        return

    NeedsBuilder(app, env).finish()


class NeedsIdBuilder(Builder):
    """Output the needs data as multiple JSON files, one per need,
    filtering by the ``needs_builder_filter`` config option if set,
    and writing to the ``needs_id`` folder (or the ``build_json_per_id_path``
    config option if set) in the output folder.

    Note this builder completely skips the write phase,
    where all documents are post-transformed, to improve performance.
    It is assumed all need data is already read in the read phase,
    and the post-processing of the data is done in the finish phase.
    """

    name = "needs_id"
    format = "needs"
    file_suffix = ".txt"
    links_suffix = None

    def get_outdated_docs(self) -> Iterable[str]:
        # Nothing is ever outdated: no per-document output is produced.
        return []

    def write(
        self,
        build_docnames: Iterable[str] | None,
        updated_docnames: Sequence[str],
        method: str = "update",
    ) -> None:
        # Skip the whole write phase; all work is done in finish().
        pass

    def finish(self) -> None:
        # Imported lazily, mirroring the module's other builders
        # (presumably to avoid an import cycle — TODO confirm).
        from sphinx_needs.filter_common import filter_needs_view

        data = SphinxNeedsData(self.env)
        version = getattr(self.env.config, "version", "unset")
        needs_config = NeedsSphinxConfig(self.env.config)
        filter_string = needs_config.builder_filter

        filtered_needs = filter_needs_view(
            data.get_needs_view(),
            needs_config,
            filter_string,
            append_warning="(from need_builder_filter)",
        )
        needs_dir = os.path.join(self.outdir, needs_config.build_json_per_id_path)
        # exist_ok makes a separate os.path.exists() pre-check redundant.
        os.makedirs(needs_dir, exist_ok=True)
        for need in filtered_needs:
            # A fresh single-need list per file; wipe first so only this
            # need is stored for the current version.
            needs_list = NeedsList(
                self.env.config, self.outdir, self.srcdir, add_schema=False
            )
            needs_list.wipe_version(version)
            needs_list.add_need(version, need)
            # Named need_id (not `id`) to avoid shadowing the builtin id().
            need_id = need["id"]
            try:
                file_name = f"{need_id}.json"
                needs_list.write_json(file_name, needs_dir)
            except Exception as e:
                # Best-effort: log and continue with the remaining needs.
                LOGGER.error(f"Needs-ID Builder {need_id} error: {e}")
        LOGGER.info("Needs_id successfully exported")


def build_needs_id_json(app: Sphinx, _exception: Exception) -> None:
    """Build-finished event callback: export one JSON file per need when the
    ``needs_build_json_per_id`` config option is enabled, regardless of the
    builder actually in use.
    """
    env = app.env

    if not NeedsSphinxConfig(env.config).build_json_per_id:
        return

    if isinstance(app.builder, NeedsIdBuilder):
        # Do not create an additional needs_json for every needs_id, if builder is already "needs_id".
        return

    NeedsIdBuilder(app, env).finish()


class NeedumlsBuilder(Builder):
    """Write generated PlantUML input files to the output dir,
    that were generated by need directives,
    if they have a ``save`` field set,
    denoting the path relative to the output folder.
    """

    name = "needumls"

    def write_doc(self, docname: str, doctree: nodes.document) -> None:
        # No per-document output; all work is done in finish().
        pass

    def finish(self) -> None:
        env = self.env
        needumls = SphinxNeedsData(env).get_or_create_umls().values()

        for needuml in needumls:
            # Only needumls that carry a (truthy) ``save`` path are exported.
            if needuml["save"]:
                puml_content = needuml["content_calculated"]
                save_path = os.path.join(self.outdir, needuml["save"])
                # Ensure the target directory exists; exist_ok makes a
                # separate os.path.exists() pre-check redundant.
                save_dir = os.path.dirname(save_path)
                os.makedirs(save_dir, exist_ok=True)

                LOGGER.info(f"Storing needuml data to file {save_path}.")
                # Explicit UTF-8: the platform-default encoding could raise
                # UnicodeEncodeError for non-ASCII PlantUML content.
                with open(save_path, "w", encoding="utf-8") as f:
                    f.write(puml_content)

    def get_outdated_docs(self) -> Iterable[str]:
        # Nothing is ever outdated: no per-document output is produced.
        return []

    def prepare_writing(self, _docnames: set[str]) -> None:
        # only needed if the write phase is run
        pass

    def write_doc_serialized(self, _docname: str, _doctree: nodes.document) -> None:
        # only needed if the write phase is run
        pass

    def cleanup(self) -> None:
        # only needed if the write phase is run
        pass

    def get_target_uri(self, _docname: str, _typ: str | None = None) -> str:
        # only needed if the write phase is run
        return ""


def build_needumls_pumls(app: Sphinx, _exception: Exception) -> None:
    """Build-finished event callback: save generated PlantUML sources when
    the ``needs_build_needumls`` config option is set, regardless of the
    builder actually in use.
    """
    env = app.env
    config = NeedsSphinxConfig(env.config)

    if not config.build_needumls:
        return

    if isinstance(app.builder, NeedumlsBuilder):
        # Do not create additional files for saved plantuml content, if builder is already "needumls".
        return

    # Another builder (e.g. html) is running with needs_build_needumls set:
    # run a throw-away NeedumlsBuilder targeting a subfolder of its outdir.
    builder = NeedumlsBuilder(app, env)
    builder.outdir = os.path.join(builder.outdir, config.build_needumls)  # type: ignore[assignment]

    builder.finish()