File: json2src.py

package info (click to toggle)
level-zero 1.26.2-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 13,468 kB
  • sloc: cpp: 130,327; ansic: 16,197; python: 9,824; makefile: 4
file content (242 lines) | stat: -rwxr-xr-x 10,181 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
#! /usr/bin/env python3
"""
 Copyright (C) 2019-2025 Intel Corporation

 SPDX-License-Identifier: MIT

"""
import argparse
import util
import generate_code
import os, sys
import time
import json
import re

"""
    helper for adding mutually-exclusive boolean arguments "--name" and "--skip-name"
"""
def add_argument(parser, name, help, default=False):
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("--" + name, dest=name, help="Enable "+help, action="store_true")
    group.add_argument("--skip-" + name, dest=name, help="Skip "+help, action="store_false")
    parser.set_defaults(**{name:default})

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Each generator family gets a --<name> / --skip-<name> toggle (see add_argument).
    add_argument(parser, "lib", "generation of lib files.", True)
    add_argument(parser, "loader", "generation of loader files.", True)
    add_argument(parser, "layers", "generation of validation layer files.", True)
    add_argument(parser, "drivers", "generation of null driver files.", True)
    parser.add_argument("--debug", action='store_true', help="dump intermediate data to disk.")
    # Fix: the original used type=list, which applies list() to the argument
    # string and splits it into single characters ("--sections core" ->
    # ['c','o','r','e']), so the section filter below could never match a
    # config name. nargs='+' collects one or more whole section names.
    parser.add_argument("--sections", type=str, nargs='+', default=None, help="Optional list of sections for which to generate source, default is all")
    parser.add_argument("--ver", type=str, default="1.0", help="specification version to generate.")
    parser.add_argument('--api-json', nargs='?', type=argparse.FileType('r'), default=sys.stdin, help="JSON file containing the API specification, by default read from stdin")
    parser.add_argument("out_dir", type=str, help="Root of the loader repository.")
    args = parser.parse_args()

    # Renamed from 'input' to avoid shadowing the builtin; json.load reads
    # directly from the already-open file object.
    api_spec = json.load(args.api_json)

    start = time.time()

    srcpath = os.path.join(args.out_dir, "source")

    # Generate source for every spec/config pair, optionally restricted to the
    # sections named on the command line.
    for idx, specs in enumerate(api_spec['specs']):
        config = api_spec['configs'][idx]
        if args.sections is None or config['name'] in args.sections:
            if args.lib:
                generate_code.generate_lib(srcpath, config['name'], config['namespace'], config['tags'], args.ver, specs, api_spec['meta'])
            if args.loader:
                generate_code.generate_loader(srcpath, config['name'], config['namespace'], config['tags'], args.ver, specs, api_spec['meta'])
            if args.layers:
                generate_code.generate_layers(srcpath, config['name'], config['namespace'], config['tags'], args.ver, specs, api_spec['meta'])
            if args.drivers:
                generate_code.generate_drivers(srcpath, config['name'], config['namespace'], config['tags'], args.ver, specs, api_spec['meta'])

    def merge_header_files(input_files, output_file):
        """Collect the unique '/// factories' section lines of several headers.

        Scans each input header for lines between a '/// factories' and a
        '/// end factories' marker (markers excluded), de-duplicates them
        across all inputs, and writes them sorted to *output_file*.

        Args:
            input_files: paths of the input header files.
            output_file: path of the merged output file.
        """
        try:
            collected = set()
            for path in input_files:
                try:
                    with open(path, 'r') as src:
                        in_factory_section = False
                        for text_line in src:
                            if '/// factories' in text_line:
                                in_factory_section = True
                            elif '/// end factories' in text_line:
                                in_factory_section = False
                            elif in_factory_section:
                                collected.add(text_line)
                except FileNotFoundError:
                    # A missing input aborts the merge entirely.
                    print(f"Error: Input file not found: {path}")
                    return

            with open(output_file, 'w') as dst:
                dst.writelines(sorted(collected))
            print(f"Successfully merged unique header file content into: {output_file}")
        except Exception as e:
            # Best-effort tool step: report and continue the overall script.
            print(f"An error occurred: {e}")

    # Temporary per-API loader headers written by the generators above; their
    # '/// factories' sections are merged into one shared factories header.
    # NOTE(review): these paths are relative to the current working directory,
    # whereas the generators write under args.out_dir — this assumes the
    # script is invoked from the repository root; confirm against the build.
    header_files = [
        'source/loader/ze_loader_internal_tmp.h',
        'source/loader/zet_loader_internal_tmp.h',
        'source/loader/zes_loader_internal_tmp.h',
        'source/loader/zer_loader_internal_tmp.h'
    ]
    output_file = 'source/loader/ze_loader_internal_factories.h'
    merge_header_files(header_files, output_file)
    def replace_factory_section(input_file, factory_file, output_file):
        """Swap the factory section of *input_file* for the factory file's content.

        Everything between the '/// factories' and '/// end factories' marker
        lines of *input_file* is replaced by the full content of
        *factory_file*; both marker lines are kept. The result is written to
        *output_file*.

        Args:
            input_file: path of the header whose factory section is replaced.
            factory_file: path of the file providing the new section body.
            output_file: path of the rewritten output file.
        """
        try:
            with open(input_file, 'r') as infile:
                source_lines = infile.readlines()

            with open(factory_file, 'r') as factory:
                replacement = factory.readlines()

            merged = []
            skipping = False

            for text_line in source_lines:
                if '/// factories' in text_line:
                    # Keep the opening marker, then splice in the new body.
                    skipping = True
                    merged.append(text_line)
                    merged.extend(replacement)
                elif '/// end factories' in text_line:
                    # Keep the closing marker and resume normal copying.
                    skipping = False
                    merged.append(text_line)
                elif not skipping:
                    merged.append(text_line)

            with open(output_file, 'w') as outfile:
                outfile.writelines(merged)

            print(f"Successfully replaced factory section in: {output_file}")
        except Exception as e:
            # Best-effort tool step: report and continue the overall script.
            print(f"An error occurred: {e}")

    # Splice the merged factories header back into the base (ze) tmp header to
    # produce the final ze_loader_internal.h; the *_tmp and factories files are
    # removed at the end of the script. Paths are cwd-relative (see note above
    # the generator calls — assumes invocation from the repository root).
    input_file = 'source/loader/ze_loader_internal_tmp.h'
    factory_file = 'source/loader/ze_loader_internal_factories.h'
    output_file = 'source/loader/ze_loader_internal.h'
    replace_factory_section(input_file, factory_file, output_file)

    def region_start_regex(name):
        """Compile a multiline pattern for the guarded opening marker of region *name*:

            #if !defined(__GNUC__)
            #pragma region <name>
            #endif
        """
        guard_open = r'^\s*#if\s*!defined\(__GNUC__\)\s*\r?\n'
        pragma_line = rf'^\s*#pragma\s+region\s+{re.escape(name)}\s*\r?\n'
        guard_close = r'^\s*#endif\s*'
        return re.compile('(?m)' + guard_open + pragma_line + guard_close)

    def region_end_regex():
        """Compile a multiline pattern for the guarded region closing marker:

            #if !defined(__GNUC__)
            #pragma endregion
            #endif
        """
        guard_open = r'^\s*#if\s*!defined\(__GNUC__\)\s*\r?\n'
        endregion_line = r'^\s*#pragma\s+endregion\s*\r?\n'
        guard_close = r'^\s*#endif\s*'
        return re.compile('(?m)' + guard_open + endregion_line + guard_close)

    def find_region_span(text, name):
        """
        Locate region <name> inside *text*.

        Returns a (body_start, body_end) pair of character indices —
        body_start is just past the opening marker, body_end is where the
        closing marker begins — or None when either marker is missing.
        """
        opening = region_start_regex(name).search(text)
        if opening is None:
            return None
        closing = region_end_regex().search(text, opening.end())
        if closing is None:
            return None
        return opening.end(), closing.start()

    def extract_region_body(text, name):
        """Return the body text of region <name>, or "" when the region is absent."""
        span = find_region_span(text, name)
        if span is None:
            return ""
        body_start, body_end = span
        return text[body_start:body_end]

    def replace_region_body(text, name, new_body):
        """
        Return *text* with the body of region <name> swapped for *new_body*.

        The opening and closing markers are preserved exactly; when the region
        cannot be found, *text* is returned unchanged.
        """
        span = find_region_span(text, name)
        if span is None:
            return text
        body_start, body_end = span
        return ''.join((text[:body_start], new_body, text[body_end:]))

    # Merge ze/zer tracing tmp headers into a single final header
    ze_tracing_tmp = os.path.join(args.out_dir, 'include', 'layers', 'ze_tracing_register_cb_tmp.h')
    zer_tracing_tmp = os.path.join(args.out_dir, 'include', 'layers', 'zer_tracing_register_cb_tmp.h')
    final_tracing_hdr = os.path.join(args.out_dir, 'include', 'layers', 'zel_tracing_register_cb.h')

    if os.path.exists(ze_tracing_tmp):
        with open(ze_tracing_tmp, 'r') as f: ze_txt = f.read()
        # A missing zer header is tolerated: zer_txt stays "" and the region
        # extractions below simply yield empty strings for it.
        zer_txt = ""
        if os.path.exists(zer_tracing_tmp):
            with open(zer_tracing_tmp, 'r') as f: zer_txt = f.read()
        else:
            print("zer_tracing_register_cb_tmp.h not found!")

        # Merge region bodies in order (ze then zer)
        callbacks_body = extract_region_body(ze_txt, 'callbacks') + "\n" + extract_region_body(zer_txt, 'callbacks')
        reg_callbacks_body = extract_region_body(ze_txt, 'register_callbacks') + "\n" + extract_region_body(zer_txt, 'register_callbacks')

        # Replace in base (ze) while preserving wrappers
        # (the ze header's text outside the two regions is kept verbatim).
        merged = replace_region_body(ze_txt, 'callbacks', callbacks_body)
        merged = replace_region_body(merged, 'register_callbacks', reg_callbacks_body)

        os.makedirs(os.path.dirname(final_tracing_hdr), exist_ok=True)
        with open(final_tracing_hdr, 'w') as f: f.write(merged)

        print(f"Successfully merged tracing header contents in: {final_tracing_hdr}")
    else:
        # Without the base ze header there is nothing to merge into.
        print("ze_tracing_register_cb_tmp.h not found!")

    # Delete temporary and factory files
    # NOTE(review): these paths are cwd-relative while the tracing tmp headers
    # above were opened under args.out_dir — assumes the script runs from the
    # repository root; confirm against the build invocation.
    files_to_delete = [
        'source/loader/ze_loader_internal_tmp.h',
        'source/loader/zet_loader_internal_tmp.h',
        'source/loader/zes_loader_internal_tmp.h',
        'source/loader/zer_loader_internal_tmp.h',
        'source/loader/ze_loader_internal_factories.h',
        'include/layers/ze_tracing_register_cb_tmp.h',
        'include/layers/zer_tracing_register_cb_tmp.h'
    ]

    # Best-effort cleanup: a missing file is reported but never fatal.
    for file_path in files_to_delete:
        try:
            os.remove(file_path)
            print(f"Deleted file: {file_path}")
        except FileNotFoundError:
            print(f"File not found, could not delete: {file_path}")
        except Exception as e:
            print(f"An error occurred while deleting {file_path}: {e}")

    if args.debug:
        # Presumably dumps the list of mako-generated files recorded by the
        # util module — verify against util.makoFileListWrite.
        util.makoFileListWrite("generated.json")

    # Wall-clock time for the whole generation run (started before the loop).
    print("\nCompleted in %.1f seconds!"%(time.time() - start))