File: compare_cts_reports.py

#!/usr/bin/python3
#
# Copyright (C) 2023 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Compare failed tests in CTS/VTS test_result.xml.

Given two or more sets of report files, this script compares them in one of
three modes:
  One-way mode: For all the failed tests in A, list the tests and the results
                in both reports.
  Two-way mode: For all the tests in A and B, list the tests and the results
                in both reports. If a test exists in only one report, show
                null in the other.
  N-way mode: Summarize each module in all reports. The module with the lowest
              pass rate among all reports is listed first.

Usage example:
  ./compare_cts_reports.py -r test_result_1.xml test_result_2.xml
  -r test_result_3.xml -m 1 -d output_dir [-o]
  For this command line, the script aggregates test_result_1.xml and
  test_result_2.xml as one report, and then compares it with test_result_3.xml
  in one-way mode. The comparison result is written to output_dir/diff.csv.

  ./compare_cts_reports.py -f parsed_result -r test_result.xml -m n -d tmp/
  For this command line, the script loads the report from the directory
  parsed_result/, and then summarizes the comparison between this report and
  test_result.xml.
"""

import argparse
import csv
import json
import os
import re
import tempfile

import aggregate_cts_reports
import constant
import parse_cts_report


def one_way_compare(reports, diff_csv):
  """Compare two reports in One-way Mode.

  Given two sets of reports, aggregate them into two reports (A and B).
  Then, list all failed tests in A and show the result of each such test in
  both A and B.

  Args:
    reports: list of reports
    diff_csv: path to csv which stores comparison results
  """

  report_a = reports[0]
  report_b = reports[1]

  with open(diff_csv, 'w') as diff_csvfile:
    diff_writer = csv.writer(diff_csvfile)
    diff_writer.writerow(['module_name', 'abi', 'class_name', 'test_name',
                          'result in A', 'result in B'])

    for keys in report_a.gen_keys_list():
      module_name, abi, class_name, test_name = keys
      result_in_a = report_a.get_test_status(
          module_name, abi, class_name, test_name
      )

      if parse_cts_report.CtsReport.is_fail(result_in_a):
        result_in_b = report_b.get_test_status(
            module_name, abi, class_name, test_name
        )

        diff_writer.writerow(
            [module_name, abi, class_name, test_name, result_in_a, result_in_b]
        )


def two_way_compare(reports, diff_csv):
  """Compare two reports in Two-way Mode.

  Given two sets of reports, aggregate them into two reports (A and B).
  Then, list all tests and show the results in A and B. If a test result
  exists in only one report, the result is recorded as NO_DATA in the other.

  Args:
    reports: list of reports
    diff_csv: path to csv which stores comparison results
  """

  diff = {}

  for i, report in enumerate(reports):
    for keys in report.gen_keys_list():
      module_name, abi, class_name, test_name = keys

      abis = diff.setdefault(module_name, {})
      test_classes = abis.setdefault(abi, {})
      tests = test_classes.setdefault(class_name, {})

      result = report.get_test_status(module_name, abi, class_name, test_name)

      if test_name not in tests:
        tests[test_name] = [constant.NO_DATA, constant.NO_DATA]

      tests[test_name][i] = result

  with open(diff_csv, 'w') as diff_csvfile:
    diff_writer = csv.writer(diff_csvfile)
    diff_writer.writerow(['module_name', 'abi', 'class_name', 'test_name',
                          'result in A', 'result in B'])

    for module_name, abis in diff.items():
      for abi, test_classes in abis.items():
        for class_name, tests in test_classes.items():
          for test_name, results in tests.items():
            if results[0] != results[1]:
              row = [module_name, abi, class_name, test_name] + results
              diff_writer.writerow(row)


def gen_summary_row(reports, module_with_abi, item):
  """Generate one row of diff.csv.

  According to module_with_abi and item, look up the value in each report and
  return the values as a list.

  Args:
    reports: list of CtsReport object
    module_with_abi: combined module_name and abi
    item: the attribute to find in report

  Returns:
    row: list to write into output file
  """

  row = []

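  # module_with_abi looks like 'CtsExampleTestCases[arm64-v8a]' (a hypothetical
  # name): peel the trailing '[abi]' suffix off the module name.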
  abi_with_bracket = re.findall(r'\[[^\[\]]+\]$', module_with_abi)[0]

  module_name = module_with_abi.removesuffix(abi_with_bracket)
  abi = abi_with_bracket[1:-1]

  for report in reports:
    module_summary = report.module_summaries.get(module_name, {})

    summary = module_summary.get(abi)

    if not summary:
      row.append(0.0 if item == constant.PASS_RATE else 0)
    elif item == constant.TESTED_ITEMS:
      row.append(summary.tested_items)
    elif item == constant.PASS_RATE:
      row.append(summary.pass_rate)
    elif item in parse_cts_report.CtsReport.STATUS_ORDER:
      row.append(summary.counter[item])
    else:
      raise ValueError(f"Invalid value '{item}' for argument 'item'")

  return row


def n_way_compare(reports, diff_csv):
  """Compare multiple reports in N-way Mode.

  Given multiple sets of reports, aggregate each set into one report. Then,
  summarize the results across these reports and write the summary into
  diff_csv, where the module with the lowest pass rate among all reports is
  displayed first.

  Args:
    reports: list of reports
    diff_csv: path to csv which stores comparison results
  """

  modules_min_rate = {}
  report_titles = []

  for i, report in enumerate(reports):
    device_name = (
        f'device_{report.info["build_device"]}'
        if 'build_device' in report.info
        else f'build_id_{report.info["build_id"]}'
    )
    report_titles.append(f'{i}_{device_name}')

    for module_name, abis in report.module_summaries.items():
      for abi, summary in abis.items():
        module_with_abi = f'{module_name}[{abi}]'

        pass_rate = summary.pass_rate

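        # Keep the lowest pass rate seen across reports; a module that first
        # appears in a later report is treated as 0.0 for the earlier ones.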
        if module_with_abi not in modules_min_rate:
          modules_min_rate[module_with_abi] = pass_rate if i == 0 else 0.0
        elif pass_rate < modules_min_rate[module_with_abi]:
          modules_min_rate[module_with_abi] = pass_rate

  module_order = sorted(modules_min_rate, key=modules_min_rate.get)

  items = parse_cts_report.CtsReport.STATUS_ORDER + [
      constant.TESTED_ITEMS,
      constant.PASS_RATE,
  ]

  with open(diff_csv, 'w') as diff_csvfile:
    diff_writer = csv.writer(diff_csvfile)
    diff_writer.writerow(['module_with_abi', 'item'] + report_titles)

    for module_with_abi in module_order:
      for item in items:
        row = gen_summary_row(reports, module_with_abi, item)
        diff_writer.writerow([module_with_abi, item] + row)


def load_parsed_report(report_dir, ignore_abi=False):
  """Load CtsReport() from a directory that stores a parsed report."""

  if not os.path.isdir(report_dir):
    raise FileNotFoundError(f'{report_dir} is not a directory')

  info_path = os.path.join(report_dir, 'info.json')
  result_path = os.path.join(report_dir, 'result.csv')

  for f in [info_path, result_path]:
    if not os.path.exists(f):
      raise FileNotFoundError(f"file {f} doesn't exist.")

  with open(info_path, 'r') as info_jsonfile:
    info = json.load(info_jsonfile)

  report = parse_cts_report.CtsReport(info)

  with open(result_path, 'r') as result_csvfile:
    report.load_from_csv(result_csvfile, ignore_abi)

  return report

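# A minimal reuse sketch (the path below is hypothetical): a directory written
# earlier by --output-files, containing info.json and result.csv, can be
# reloaded without re-parsing the original XML:
#
#   report = load_parsed_report('out/0_devicename_xyz')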

def main():
  parser = argparse.ArgumentParser()

  parser.add_argument('-r', '--report', nargs='+',
                      dest='cts_reports', action='append',
                      help=('Paths to CTS report files. Each -r flag is '
                            'followed by a group of files to be aggregated '
                            'as one report.'))
  parser.add_argument('-f', '--folder',
                      dest='cts_reports', action='append',
                      help=('Path to a folder that stores the intermediate '
                            'files of a parsed report.'))
  parser.add_argument('--mode', '-m', required=True, choices=['1', '2', 'n'],
                      help=('Comparison mode. 1: One-way mode. '
                            '2: Two-way mode. n: N-way mode.'))
  parser.add_argument('--output-dir', '-d', required=True,
                      help='Directory to store output files.')
  parser.add_argument('--csv', default='diff.csv',
                      help='Name of the CSV output file under the output '
                           'directory.')
  parser.add_argument('--output-files', '-o', action='store_true',
                      help='Output parsed csv files.')
  parser.add_argument('--ignore-abi', action='store_true',
                      help='Ignore test ABIs while comparing.')

  args = parser.parse_args()

  mode = args.mode
  reports = args.cts_reports

  if not reports:
    parser.error('At least one --report or --folder argument is required.')

  if mode in ('1', '2') and len(reports) != 2:
    msg = 'Two sets of reports are required for one-way and two-way mode.'
    raise UserWarning(msg)

  output_dir = args.output_dir
  if not os.path.exists(output_dir):
    raise FileNotFoundError(f'Output directory {output_dir} does not exist.')

  diff_csv = os.path.join(output_dir, args.csv)

  ctsreports = []
  ignore_abi = args.ignore_abi
  for i, report_path in enumerate(reports):
    # Paths given via `--report` arrive as a list of files; `--folder` gives
    # a single directory path string.
    is_report_files = isinstance(report_path, list)

    if is_report_files:
      report = aggregate_cts_reports.aggregate_cts_reports(
          report_path, constant.ALL_TEST_ABIS, ignore_abi)
    else:
      report = load_parsed_report(report_path, ignore_abi)

    if is_report_files and args.output_files:
      device_name = report.info['build_device']
      sub_dir_name = tempfile.mkdtemp(
          prefix=f'{i}_{device_name}_', dir=output_dir
      )
      report.output_files(sub_dir_name)

    ctsreports.append(report)

  if args.mode == '1':
    one_way_compare(ctsreports, diff_csv)
  elif args.mode == '2':
    two_way_compare(ctsreports, diff_csv)
  elif args.mode == 'n':
    n_way_compare(ctsreports, diff_csv)
  else:
    raise ValueError(f'Unexpected argument for --mode: {args.mode}')


if __name__ == '__main__':
  main()