File: test_readable.py

package info (click to toggle)
ceph 18.2.7+ds-1
  • links: PTS, VCS
  • area: main
  • in suites: trixie
  • size: 1,186,140 kB
  • sloc: cpp: 6,278,388; ansic: 3,507,431; python: 372,964; asm: 216,381; java: 133,450; sh: 125,043; xml: 39,398; ruby: 32,026; makefile: 29,004; javascript: 23,994; cs: 18,980; perl: 9,708; sql: 7,833; lisp: 5,920; pascal: 3,109; ada: 1,681; yacc: 478; awk: 188; f90: 55; php: 1
file content (340 lines) | stat: -rwxr-xr-x 13,302 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
#!/usr/bin/env python3
import json
import os
import sys
import subprocess
import tempfile
import difflib
from typing import Dict, Any
from pathlib import Path
import concurrent.futures
from collections import OrderedDict

# Scratch files that worker threads append failure records to.
# tempfile.mkstemp() is used instead of the deprecated tempfile.mktemp():
# mktemp() only generates a name, leaving a window in which an attacker (or a
# concurrent run) can create the path first; mkstemp() atomically creates the
# file.  The variables still hold plain path strings, and the later
# open(path, "a") calls work unchanged on the pre-created empty files.
_unrec_fd, temp_unrec = tempfile.mkstemp(prefix="unrecognized_")
os.close(_unrec_fd)
_err_fd, err_file_rc = tempfile.mkstemp(prefix="dencoder_err_")
os.close(_err_fd)

# Memo of types known to be absent from backward_compat (see should_skip_object).
fast_shouldnt_skip = []
# version -> {type: [incompatible entry names]}, filled by check_backward_compat().
backward_compat: Dict[str, Any] = {}
# type -> [incompatible entry names] merged across versions.
incompat_paths: Dict[str, Any] = {}

def sort_values(obj):
    """Return *obj* with every list directly held by it sorted.

    Dicts keep their key order (returned as an OrderedDict) while their
    values are normalized recursively; lists are sorted using
    sort_list_values as the key so dict/list elements order deterministically.
    Scalars pass through unchanged.
    """
    if isinstance(obj, dict):
        normalized = OrderedDict()
        for key in obj:
            normalized[key] = sort_values(obj[key])
        return normalized
    if isinstance(obj, list):
        return sorted(obj, key=sort_list_values)
    return obj

def sort_list_values(obj):
    """Sort key for heterogeneous list elements.

    Dicts compare by their sorted (key, value) pairs, nested lists compare
    by their recursively sorted form, and scalars compare by themselves.
    """
    if isinstance(obj, dict):
        return sorted(obj.items())
    if isinstance(obj, list):
        return sorted(obj, key=sort_list_values)
    return obj


def process_type(file_path, type):
    """Round-trip one corpus object through ceph-dencoder and compare dumps.

    Decodes *file_path* as *type* and dumps JSON, then decodes/encodes/decodes
    and dumps again.  Returns 0 when the two dumps match (directly, or after
    recursive value-sorting for types that report themselves non-deterministic
    via 'is_deterministic'), 1 on any failure.  Failing types/paths are
    appended to err_file_rc so the caller can inspect them afterwards.
    """
    print(f"dencoder test for {file_path}")
    cmd1 = [CEPH_DENCODER, "type", type, "import", file_path, "decode", "dump_json"]
    cmd2 = [CEPH_DENCODER, "type", type, "import", file_path, "decode", "encode", "decode", "dump_json"]

    output1 = ""
    output2 = ""
    try:
        result1 = subprocess.run(cmd1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output1 = result1.stdout.decode('unicode_escape')
        result2 = subprocess.run(cmd2, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output2 = result2.stdout.decode('unicode_escape')

        if result1.returncode != 0 or result2.returncode != 0:
            debug_print(f"**** reencode of {file_path} resulted in wrong return code ****")
            print(f"Error encountered in subprocess. Command: {cmd1}")
            print(f"Return code: {result1.returncode} Command:{result1.args} Output: {result1.stdout.decode('unicode_escape')}")
            print(f"Error encountered in subprocess. Command: {cmd2}")
            print(f"Return code: {result2.returncode} Command:{result2.args} Output: {result2.stdout.decode('unicode_escape')}")

            with open(err_file_rc, "a") as f:
                f.write(f"{type} -- {file_path}")
                f.write("\n")
            return 1

        if output1 != output2:
            # The raw dumps differ; ask the dencoder whether this type is
            # expected to encode deterministically before declaring failure.
            cmd_determ = [CEPH_DENCODER, "type", type, "is_deterministic"]
            determ_res = subprocess.run(cmd_determ, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Exit codes 0/1 are the yes/no answers; anything else is an error.
            if determ_res.returncode != 0 and determ_res.returncode != 1:
                error_message = determ_res.stderr.decode().strip()
                debug_print(f"Error running command: {error_message}")
                return 1

            # Compare again after sorting all nested values; equal sorted
            # dumps mean only ordering differed (allowed for such types).
            json_output1 = json.loads(output1)
            sorted_json_output1 = json.dumps(sort_values(json_output1), indent=4)
            json_output2 = json.loads(output2)
            sorted_json_output2 = json.dumps(sort_values(json_output2), indent=4)
            if sorted_json_output1 == sorted_json_output2:
                debug_print(f"non-deterministic type {type} passed the test")
                return 0

            debug_print(f"**** reencode of {file_path} resulted in a different dump ****")
            diff_output = "\n".join(difflib.ndiff(output1.splitlines(), output2.splitlines()))
            diff_file   = tempfile.mktemp(prefix=f"diff_{type}_{file_path.name}_")
            with open(diff_file, "w") as f:
                f.write(diff_output)
            print(f"Different output for {file_path}:\n{diff_output}")
            return 1  # File failed the test

    # NOTE(review): subprocess.run() without check=True never raises
    # CalledProcessError; this handler is kept for safety should check=True
    # ever be added above.
    except subprocess.CalledProcessError as e:
        print(f"Error encountered in subprocess. Command: {cmd1}")
        print(f"Return code: {e.returncode} Command:{e.cmd} Output: {e.output}")
        return 1

    except UnicodeDecodeError as e:
        # BUG FIX: UnicodeDecodeError has no returncode/cmd/output attributes,
        # so the previous handler raised AttributeError here and hid the real
        # decoding failure.  Report the exception itself instead.
        print(f"Unicode Error encountered in subprocess. Command: {cmd1}")
        print(f"Decode error: {e}")
        return 1

    return 0  # File passed the test

def test_object_wrapper(type, vdir, arversion, current_ver):
    """Run process_type over every corpus file for one (type, archive version).

    Returns a (numtests, failed, unrecognized) tuple: the number of files
    tested, the number that failed, and the type name when ceph-dencoder
    does not recognize the type (empty string otherwise).
    """
    global incompat_paths
    _numtests = 0
    _failed = 0
    unrecognized = ""

    # ceph-dencoder exits 0 only when it knows the type.
    if subprocess.call([CEPH_DENCODER, "type", type], stderr=subprocess.DEVNULL) != 0:
        unrecognized = type
        debug_print("skipping unrecognized type {} return {}".format(type, (_numtests, _failed, unrecognized)))
        return (_numtests, _failed, unrecognized)

    # Skip the whole type only when it is backward-incompatible AND no
    # per-file incompatibility list exists for it.
    if should_skip_object(type, arversion, current_ver) and (type not in incompat_paths or len(incompat_paths[type]) == 0):
        debug_print(f"skipping object of type {type} due to backward incompatibility")
        return (_numtests, _failed, unrecognized)

    debug_print(f"        {vdir}/objects/{type}")
    candidates = list(vdir.joinpath("objects", type).glob('*'))

    # Filter out individual files flagged as forward-incompatible.
    if type in incompat_paths:
        blocked = set(incompat_paths[type])
        candidates = [f for f in candidates if f.name not in blocked]

    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(process_type, f, type) for f in candidates]
        for fut in concurrent.futures.as_completed(futures):
            _numtests += 1
            _failed += fut.result()

    return (_numtests, _failed, unrecognized)

def should_skip_object(type, arversion, current_ver):
    """Decide whether an object of *type* must be skipped for compatibility.

    An object is skipped when the global 'backward_compat' structure lists
    *type* under some version that is >= *arversion* (the archive version
    being read) and different from *current_ver*.  Types never mentioned in
    backward_compat are memoized in the global 'fast_shouldnt_skip' list so
    repeat lookups short-circuit.

    Returns True to skip, False to test the object.
    """
    global backward_compat
    global fast_shouldnt_skip

    # Fast path: we already determined this type has no compat entries.
    if type in fast_shouldnt_skip:
        debug_print(f"fast Type {type} does not exist in the backward compatibility structure.")
        return False

    # Not listed under any version at all: remember that and never skip.
    if not any(type in per_version for per_version in backward_compat.values()):
        fast_shouldnt_skip.append(type)
        return False

    # Skip iff some qualifying version lists this type.
    return any(
        type in per_version and ver >= arversion and ver != current_ver
        for ver, per_version in backward_compat.items()
    )

def check_backward_compat():
    """Scan <DIR>/archive for forward_incompat markers.

    For every version directory under the archive, records which types have a
    'forward_incompat' entry and which concrete entry names (files, dirs or
    symlinks) it contains.

    Returns a pair:
      _backward_compat:    {version: {type: [incompatible entry names]}}
      _incompat_paths_all: {type: [incompatible entry names]} merged across
                           all versions (a later version's list replaces an
                           earlier one's for the same type).

    Relies on the module-level DIR corpus path.
    """
    _backward_compat = {}
    _incompat_paths_all = {}
    archive_dir = Path(os.path.join(DIR, 'archive'))

    # Path.is_dir() is False for nonexistent paths, so one check suffices.
    if archive_dir.is_dir():
        for version in archive_dir.iterdir():
            if not version.is_dir():
                continue
            per_type = {}
            _backward_compat[version.name] = per_type
            incompat_dir = version / "forward_incompat"
            if not incompat_dir.exists():
                continue
            for type_entry in incompat_dir.iterdir():
                if type_entry.is_dir():
                    names = [child.name
                             for child in type_entry.iterdir()
                             if child.is_dir() or child.is_file() or child.is_symlink()]
                    # Both maps deliberately share the same list object.
                    per_type[type_entry.name] = names
                    _incompat_paths_all[type_entry.name] = names
                else:
                    # A plain file marker means "incompatible, no specific paths".
                    per_type[type_entry.name] = []

    debug_print(f"backward_compat: {_backward_compat}")
    debug_print(f"incompat_paths: {_incompat_paths_all}")

    return _backward_compat, _incompat_paths_all

def process_batch(batch):
    """Run test_object_wrapper for every task tuple in *batch*.

    Each task is a (type, vdir, arversion, current_ver) tuple; they are
    fanned out to a thread pool and the (numtests, failed, unrecognized)
    results are collected in completion order.
    """
    pool_size = 15
    outcomes = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=pool_size) as pool:
        pending = [pool.submit(test_object_wrapper, *task) for task in batch]
        for done in concurrent.futures.as_completed(pending):
            outcomes.append(done.result())
    return outcomes

# Create a generator that processes batches asynchronously
def async_process_batches(task_batches):
    """Yield each batch's result list as its worker process completes.

    Batches are distributed over a process pool; results are produced in
    completion order, not submission order.
    """
    with concurrent.futures.ProcessPoolExecutor(max_workers=10) as pool:
        pending = [pool.submit(process_batch, chunk) for chunk in task_batches]
        for done in concurrent.futures.as_completed(pending):
            yield done.result()

def debug_print(msg):
    """Print *msg* with a DEBUG prefix when the global debug flag is set."""
    if not debug:
        return
    print("DEBUG: {}".format(msg))


def main():
    """Walk the corpus archive, batch the (type, version) tasks, run them.

    Populates the global backward_compat/incompat_paths maps, groups every
    object type found under <DIR>/archive/<version>/objects into batches of
    100, runs the batches across a process pool, accumulates the results,
    and writes any unrecognized type names to temp_unrec.

    Returns the process exit status: 0 on success, 1 when any test failed
    or when no tests were found at all.
    """
    global backward_compat
    global incompat_paths

    failed = 0
    numtests = 0
    task_batches = []
    current_batch = []
    batch_size = 100

    backward_compat, incompat_paths = check_backward_compat()
    debug_print(f'found {len(backward_compat)} backward incompatibilities')

    for arversion_entry in sorted(DIR.joinpath("archive").iterdir(), key=lambda entry: entry.name):
        arversion = arversion_entry.name
        vdir = Path(DIR.joinpath("archive", arversion))

        if not arversion_entry.is_dir() or not vdir.joinpath("objects").is_dir():
            debug_print("skipping non-directory {}".format(arversion))
            continue

        for type_entry in vdir.joinpath("objects").iterdir():
            type = type_entry.name
            current_batch.append((type, vdir, arversion, current_ver))
            if len(current_batch) >= batch_size:
                task_batches.append(current_batch)
                current_batch = []

    # Flush the final, possibly partial batch.
    if current_batch:
        task_batches.append(current_batch)

    full_unrecognized = []
    for results in async_process_batches(task_batches):
        for _numtests, _failed, unrecognized in results:
            debug_print("numtests: {}, failed: {}".format(_numtests, _failed))
            numtests += _numtests
            failed += _failed
            if unrecognized.strip() != '':
                full_unrecognized.append(unrecognized)

    if full_unrecognized:
        with open(temp_unrec, "a") as file_unrec:
            file_unrec.writelines(line + "\n" for line in full_unrecognized)

    if failed > 0:
        print("FAILED {}/{} tests.".format(failed, numtests))
        return 1

    if numtests == 0:
        print("FAILED: no tests found to run!")
        # BUG FIX: previously fell through, printed "Passed 0 tests." and
        # returned 0 — an empty corpus silently reported success.
        return 1

    print("Passed {} tests.".format(numtests))
    return 0

if __name__ == "__main__":
    # BUG FIX: sys.argv always contains at least the script name, so the old
    # "len(sys.argv) < 1" test could never trigger and a missing corpus-dir
    # argument crashed with IndexError below; "< 2" catches it and prints usage.
    if len(sys.argv) < 2:
        print(f"usage: {sys.argv[0]} <corpus-dir>")
        sys.exit(1)

    DIR = Path(sys.argv[1])
    CEPH_DENCODER = "ceph-dencoder"
    # Fail fast (with a CalledProcessError) when ceph-dencoder is not runnable;
    # this also echoes its version to stdout for the test log.
    subprocess.run([CEPH_DENCODER, 'version'], check=True)
    current_ver = subprocess.check_output([CEPH_DENCODER, "version"]).decode().strip()
    debug = False
    ret = main()
    sys.exit(ret)