File: export_targets.py

#! /usr/bin/env python3
assert __name__ == '__main__'

'''
To update ANGLE in Gecko, use Windows with git-bash, and set up depot_tools, python2, and
python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come
before python3 in your PATH.

Upstream: https://chromium.googlesource.com/angle/angle

Our repo: https://github.com/mozilla/angle
It has branches like 'firefox-60', which are the branches we use for pulling into
Gecko with this script.

This script leaves a record of the merge-base and cherry-picks that we pull into
Gecko. (gfx/angle/cherries.log)

ANGLE<->Chrome version mappings are here: https://omahaproxy.appspot.com/
An easy choice is to grab the ANGLE branch used by Chrome Beta.

## Usage

Prepare your env:

~~~
export PATH="$PATH:/path/to/depot_tools"
~~~

If this is a new repo, don't forget:

~~~
# In the angle repo:
./scripts/bootstrap.py
gclient sync
~~~

Update: (in the angle repo)

~~~
# In the angle repo:
/path/to/gecko/gfx/angle/update-angle.py origin/chromium/XXXX
git push moz # Push the firefox-XX branch to github.com/mozilla/angle
~~~

'''

import json
import os
import pathlib
import re
import shutil
import subprocess
import sys
from typing import Callable, Sequence, Set  # mypy annotations

SCRIPT_DIR = os.path.dirname(__file__)

GN_ENV = dict(os.environ)
# We need to set DEPOT_TOOLS_WIN_TOOLCHAIN to 0 for non-Googlers, but otherwise
# leave it unset since vs_toolchain.py assumes that the user is a Googler with
# the Visual Studio files in depot_tools if DEPOT_TOOLS_WIN_TOOLCHAIN is not
# explicitly set to 0.
vs_dir = os.path.join(SCRIPT_DIR, '..', 'third_party', 'depot_tools', 'win_toolchain', 'vs_files')
if not os.path.isdir(vs_dir):
    GN_ENV['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'

if len(sys.argv) < 3:
    sys.exit('Usage: export_targets.py OUT_DIR ROOTS...')

(OUT_DIR, *ROOTS) = sys.argv[1:]
for x in ROOTS:
    assert x.startswith('//:')
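
# Illustrative invocation (target names here are hypothetical): after generating
# a GN build directory, e.g. `gn gen out/Release`, run something like
#     python3 export_targets.py out/Release //:libGLESv2 //:libEGL
# to print a flattened JSON description of the requested library targets to stdout.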

# ------------------------------------------------------------------------------

def run_checked(*args, **kwargs):
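    '''Echo the command to stderr, then run it, raising CalledProcessError on failure.'''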
    print(' ', args, file=sys.stderr)
    sys.stderr.flush()
    return subprocess.run(args, check=True, **kwargs)


def sortedi(x):
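    '''Sort case-insensitively.'''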
    return sorted(x, key=str.lower)


def dag_traverse(root_keys: Sequence[str], pre_recurse_func: Callable[[str], list]):
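    '''Depth-first traversal over a DAG rooted at `root_keys`.

    `pre_recurse_func(key)` must return either `(next_keys,)` or
    `(next_keys, post_recurse_func)`. Each key is visited at most once; its
    `next_keys` are recursed into before `post_recurse_func(key)` (if given)
    is called.
    '''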
    visited_keys: Set[str] = set()

    def recurse(key):
        if key in visited_keys:
            return
        visited_keys.add(key)

        t = pre_recurse_func(key)
        try:
            (next_keys, post_recurse_func) = t
        except ValueError:
            (next_keys,) = t
            post_recurse_func = None

        for x in next_keys:
            recurse(x)

        if post_recurse_func:
            post_recurse_func(key)
        return

    for x in root_keys:
        recurse(x)
    return
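
# Minimal usage sketch of dag_traverse, with a hypothetical graph:
#
#     graph = {'a': ['b', 'c'], 'b': ['c'], 'c': []}
#     order = []
#     dag_traverse(['a'], lambda k: (graph[k], order.append))
#     # Each node is visited once; `order` ends up as ['c', 'b', 'a'] (post-order).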

# ------------------------------------------------------------------------------

print('Importing graph', file=sys.stderr)

try:
    p = run_checked('gn', 'desc', '--format=json', str(OUT_DIR), '*', stdout=subprocess.PIPE,
                    env=GN_ENV, shell=(sys.platform == 'win32'))
except subprocess.CalledProcessError:
    sys.stderr.buffer.write(b'"gn desc" failed. Is depot_tools in your PATH?\n')
    sys.exit(1)

# -

print('\nProcessing graph', file=sys.stderr)
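# `descs` maps GN target labels to the property dicts reported by `gn desc`.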
descs = json.loads(p.stdout.decode())

# Ready to traverse
# ------------------------------------------------------------------------------

LIBRARY_TYPES = ('shared_library', 'static_library')

def flattened_target(target_name: str, descs: dict, stop_at_lib: bool = True) -> dict:
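    '''Return a copy of descs[target_name] with the list-valued properties of its
    (transitive) dependencies merged in.

    With stop_at_lib=True, traversal stops at shared/static library deps, so
    their properties are not folded into the result.
    '''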
    flattened = dict(descs[target_name])

    EXPECTED_TYPES = LIBRARY_TYPES + ('source_set', 'group', 'action')

    def pre(k):
        dep = descs[k]

        dep_type = dep['type']
        deps = dep['deps']
        if stop_at_lib and dep_type in LIBRARY_TYPES:
            return ((),)

        if dep_type == 'copy':
            assert not deps, (target_name, dep['deps'])
        else:
            assert dep_type in EXPECTED_TYPES, (k, dep_type)
            for (k,v) in dep.items():
                if type(v) in (list, tuple, set):
                    # This is a workaround for
                    # https://bugs.chromium.org/p/gn/issues/detail?id=196, where
                    # the value of "public" can be a string instead of a list.
                    existing = flattened.get(k, [])
                    if isinstance(existing, str):
                        existing = [existing]
                    # Use temporary sets then sort them to avoid a bottleneck here
                    if not isinstance(existing, set):
                        flattened[k] = set(existing)
                    flattened[k].update(v)
                else:
                    #flattened.setdefault(k, v)
                    pass
        return (deps,)

    dag_traverse(descs[target_name]['deps'], pre)

    for k, v in flattened.items():
        if isinstance(v, set):
            flattened[k] = sortedi(v)
    return flattened
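
# Illustrative sketch of flattening (targets below are hypothetical):
#
#     descs = {'//:a': {'type': 'group', 'deps': ['//:b'], 'sources': ['//x.cc']},
#              '//:b': {'type': 'source_set', 'deps': [], 'sources': ['//y.cc']}}
#
# flattened_target('//:a', descs)['sources'] would contain both '//x.cc' and
# '//y.cc', because '//:b' is not a library and its list-valued fields are merged.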

# ------------------------------------------------------------------------------
# Check that includes are valid. (gn's version of this check doesn't seem to work!)

INCLUDE_REGEX = re.compile(b'^ *# *include +([<"])([^>"]+)[>"].*$', re.MULTILINE)
assert INCLUDE_REGEX.findall(b' #  include <foo>  //comment\n#include "bar"') == [(b'<', b'foo'), (b'"', b'bar')]

# Most of these are ignored because this script does not currently handle
# #includes inside #ifdef blocks properly, so such headers are erroneously
# flagged as included even though they are not part of the source list.
IGNORED_INCLUDES = {
    b'absl/container/flat_hash_map.h',
    b'absl/container/flat_hash_set.h',
    b'compiler/translator/glsl/TranslatorESSL.h',
    b'compiler/translator/glsl/TranslatorGLSL.h',
    b'compiler/translator/hlsl/TranslatorHLSL.h',
    b'compiler/translator/msl/TranslatorMSL.h',
    b'compiler/translator/null/TranslatorNULL.h',
    b'compiler/translator/spirv/TranslatorSPIRV.h',
    b'compiler/translator/wgsl/TranslatorWGSL.h',
    b'contrib/optimizations/slide_hash_neon.h',
    b'dirent_on_windows.h',
    b'dlopen_fuchsia.h',
    b'kernel/image.h',
    b'libANGLE/renderer/d3d/d3d11/Device11.h',
    b'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h',
    b'libANGLE/renderer/d3d/DisplayD3D.h',
    b'libANGLE/renderer/d3d/RenderTargetD3D.h',
    b'libANGLE/renderer/gl/cgl/DisplayCGL.h',
    b'libANGLE/renderer/gl/egl/android/DisplayAndroid.h',
    b'libANGLE/renderer/gl/egl/DisplayEGL.h',
    b'libANGLE/renderer/gl/egl/gbm/DisplayGbm.h',
    b'libANGLE/renderer/gl/glx/DisplayGLX.h',
    b'libANGLE/renderer/gl/glx/DisplayGLX_api.h',
    b'libANGLE/renderer/gl/wgl/DisplayWGL.h',
    b'libANGLE/renderer/metal/DisplayMtl_api.h',
    b'libANGLE/renderer/null/DisplayNULL.h',
    b'libANGLE/renderer/vulkan/android/AHBFunctions.h',
    b'libANGLE/renderer/vulkan/android/DisplayVkAndroid.h',
    b'libANGLE/renderer/vulkan/DisplayVk_api.h',
    b'libANGLE/renderer/vulkan/fuchsia/DisplayVkFuchsia.h',
    b'libANGLE/renderer/vulkan/ggp/DisplayVkGGP.h',
    b'libANGLE/renderer/vulkan/mac/DisplayVkMac.h',
    b'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h',
    b'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h',
    b'libANGLE/renderer/vulkan/wayland/DisplayVkWayland.h',
    b'loader_cmake_config.h',
    b'loader_linux.h',
    b'loader_windows.h',
    b'optick.h',
    b'spirv-tools/libspirv.h',
    b'third_party/volk/volk.h',
    b'vk_loader_extensions.c',
    b'vk_snippets.h',
    b'vulkan_android.h',
    b'vulkan_beta.h',
    b'vulkan_directfb.h',
    b'vulkan_fuchsia.h',
    b'vulkan_ggp.h',
    b'vulkan_ios.h',
    b'vulkan_macos.h',
    b'vulkan_metal.h',
    b'vulkan_sci.h',
    b'vulkan_vi.h',
    b'vulkan_wayland.h',
    b'vulkan_win32.h',
    b'vulkan_xcb.h',
    b'vulkan_xlib.h',
    b'vulkan_xlib_xrandr.h',
    b'vulkan_ohos.h',
    # rapidjson adds these include stubs to its documentation comments.
    # Since the script doesn't skip comments, they are erroneously
    # marked as valid includes.
    b'rapidjson/...',
    # Validation layers support building with robin hood hashing, but we are not enabling that.
    # See http://anglebug.com/42264327
    # Update: Validation layers now use parallel hashmap, but it is still not wanted in BUILD.gn.
    b'parallel_hashmap/phmap.h',
    # Validation layers optionally use mimalloc
    b'mimalloc-new-delete.h',
    b'mimalloc-stats.h',
    # From the Vulkan-Loader
    b'winres.h',
    # From a comment in vulkan-validation-layers/src/layers/vk_mem_alloc.h
    b'my_custom_assert.h',
    b'my_custom_min.h',
    # https://bugs.chromium.org/p/gn/issues/detail?id=311
    b'spirv/unified1/spirv.hpp11',
    # Behind #if defined(QAT_COMPRESSION_ENABLED) in third_party/zlib/deflate.c
    b'contrib/qat/deflate_qat.h',
    # Behind #if defined(TRACY_ENABLE) in third_party/vulkan-validation-layers/src/layers/vulkan/generated/chassis.cpp
    b'profiling/profiling.h',
}

IGNORED_INCLUDE_PREFIXES = {
    b'android',
    b'Carbon',
    b'CoreFoundation',
    b'CoreServices',
    b'IOSurface',
    b'mach',
    b'mach-o',
    b'OpenGL',
    b'pci',
    b'sys',
    b'wrl',
    b'X11',
}

IGNORED_DIRECTORIES = {
    '//buildtools/third_party/libc++',
    '//third_party/libc++/src',
    '//third_party/abseil-cpp',
    '//third_party/SwiftShader',
    '//third_party/dawn',
}

def has_all_includes(target_name: str, descs: dict) -> bool:
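    '''Return True if every quoted #include in the target's sources matches a file
    name from the flattened 'sources'/'outputs' of the target or an ignore list
    above; otherwise warn on stderr and return False.

    Targets under IGNORED_DIRECTORIES are skipped entirely.
    '''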
    for ignored_directory in IGNORED_DIRECTORIES:
        if target_name.startswith(ignored_directory):
            return True

    flat = flattened_target(target_name, descs, stop_at_lib=False)
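    # Compare by basename, as bytes, so entries can be matched against the byte
    # strings captured by INCLUDE_REGEX.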
    acceptable_sources = flat.get('sources', []) + flat.get('outputs', [])
    acceptable_sources = {x.rsplit('/', 1)[-1].encode() for x in acceptable_sources}

    ret = True
    desc = descs[target_name]
    for cur_file in desc.get('sources', []):
        assert cur_file.startswith('/'), cur_file
        if not cur_file.startswith('//'):
            continue
        cur_file = pathlib.Path(cur_file[2:])
        text = cur_file.read_bytes()
        for m in INCLUDE_REGEX.finditer(text):
            if m.group(1) == b'<':
                continue
            include = m.group(2)
            if include in IGNORED_INCLUDES:
                continue
            try:
                (prefix, _) = include.split(b'/', 1)
                if prefix in IGNORED_INCLUDE_PREFIXES:
                    continue
            except ValueError:
                pass

            include_file = include.rsplit(b'/', 1)[-1]
            if include_file not in acceptable_sources:
                #print('  acceptable_sources:')
                #for x in sorted(acceptable_sources):
                #    print('   ', x)
                print('Warning in {}: {}: Included file must be listed in the GN target'
                      ' or its public dependency: {}'.format(target_name, cur_file, include),
                      file=sys.stderr)
                ret = False
            #print('Looks valid:', m.group())
            continue

    return ret

# -
# Gather real targets:

def gather_libraries(roots: Sequence[str], descs: dict) -> Set[str]:
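    '''Walk the dependency graph from `roots`, checking that every target's
    #includes are accounted for, and collect the shared/static library targets.
    '''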
    libraries = set()
    def fn(target_name):
        cur = descs[target_name]
        print('  ' + cur['type'], target_name, file=sys.stderr)
        assert has_all_includes(target_name, descs), target_name

        if cur['type'] in ('shared_library', 'static_library'):
            libraries.add(target_name)
        return (cur['deps'], )

    dag_traverse(roots, fn)
    return libraries

# -

libraries = gather_libraries(ROOTS, descs)
print(f'\n{len(libraries)} libraries:', file=sys.stderr)
for k in libraries:
    print(f'  {k}', file=sys.stderr)
print('\nstdout begins:', file=sys.stderr)
sys.stderr.flush()

# ------------------------------------------------------------------------------
# Output

out = {k: flattened_target(k, descs) for k in libraries}

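# For each exported library, also record its direct shared/static library
# dependencies as 'dep_libs'.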
for (k,desc) in out.items():
    dep_libs: Set[str] = set()
    for dep_name in set(desc['deps']):
        dep = descs[dep_name]
        if dep['type'] in LIBRARY_TYPES:
            dep_libs.add(dep_name)
    desc['dep_libs'] = sortedi(dep_libs)

json.dump(out, sys.stdout, indent='  ')
sys.exit(0)