File: test_ci.patch

Package: nodejs 22.21.1+dfsg+~cs22.19.0-3 (Debian, suite: experimental)

Description: adapt tools/test.py
 * log to stdout
 * increase timeout multipliers
 * rename --flaky-tests to --flaky-tests-mode and reuse --flaky-tests as a comma-separated test list, like --skip-tests (see the sketch after this header)
Author: Jérémy Lal <kapouer@melix.org>
Forwarded: not-needed
Reviewed-By: Xavier Guimard <yadd@debian.org>
Last-Update: 2025-02-12
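
Not part of the patch: a minimal, runnable Python sketch of how the renamed and
added options look to tools/test.py after this change. The option names, help
strings and the comma-splitting mirror the hunks below; the exact values of the
RUN/SKIP/DONTCARE/KEEP_RETRYING constants and the sample test names are
illustrative assumptions, not taken from the runner.

    # Sketch: the patched option handling, reduced to the three options this
    # patch touches.  Constants and argv values below are illustrative only.
    import argparse

    RUN, SKIP, DONTCARE, KEEP_RETRYING = "run", "skip", "dontcare", "keep_retrying"

    parser = argparse.ArgumentParser()
    parser.add_argument("--flaky-tests-mode", default="run",
        help="Regard tests marked as flaky (run|skip|dontcare|keep_retrying)")
    parser.add_argument("--skip-tests", default="",
        help="Tests that should not be executed (comma-separated)")
    parser.add_argument("--flaky-tests", default="",
        help="Tests that are flaky (comma-separated)")

    options = parser.parse_args([
        "--flaky-tests-mode=dontcare",
        "--flaky-tests=test-foo,test-bar",  # illustrative test names
        "--skip-tests=test-baz",
    ])

    # Split at commas and drop empty strings, as the patch does for both lists.
    options.skip_tests = [t for t in options.skip_tests.split(",") if t]
    options.flaky_tests = [t for t in options.flaky_tests.split(",") if t]

    if options.flaky_tests_mode not in (RUN, SKIP, DONTCARE, KEEP_RETRYING):
        raise SystemExit("Unknown flaky-tests mode %s" % options.flaky_tests_mode)

    print(options.flaky_tests)  # ['test-foo', 'test-bar']
    print(options.skip_tests)   # ['test-baz']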

--- a/Makefile
+++ b/Makefile
@@ -579,7 +579,7 @@
 test-ci-native: LOGLEVEL := info ## Build and test addons without building anything else.
 test-ci-native: | benchmark/napi/.buildstamp test/addons/.buildstamp test/js-native-api/.buildstamp test/node-api/.buildstamp test/sqlite/.buildstamp
 	$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
-		--mode=$(BUILDTYPE_LOWER) --flaky-tests=$(FLAKY_TESTS) \
+		--mode=$(BUILDTYPE_LOWER) --flaky-tests-mode=$(FLAKY_TESTS) \
 		$(TEST_CI_ARGS) $(CI_NATIVE_SUITES)
 
 .PHONY: test-ci-js
@@ -587,7 +587,7 @@
 # Related CI job: node-test-commit-arm-fanned
 test-ci-js: | clear-stalled ## Build and test JavaScript with building anything else.
 	$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
-		--mode=$(BUILDTYPE_LOWER) --flaky-tests=$(FLAKY_TESTS) \
+		--mode=$(BUILDTYPE_LOWER) --flaky-tests-mode=$(FLAKY_TESTS) \
 		--skip-tests=$(CI_SKIP_TESTS) \
 		$(TEST_CI_ARGS) $(CI_JS_SUITES)
 	$(info Clean up any leftover processes, error if found.)
@@ -603,7 +603,7 @@
 test-ci: | clear-stalled bench-addons-build build-addons build-js-native-api-tests build-node-api-tests build-sqlite-tests doc-only
 	out/Release/cctest --gtest_output=xml:out/junit/cctest.xml
 	$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
-		--mode=$(BUILDTYPE_LOWER) --flaky-tests=$(FLAKY_TESTS) \
+		--mode=$(BUILDTYPE_LOWER) --flaky-tests-mode=$(FLAKY_TESTS) \
 		$(TEST_CI_ARGS) $(CI_JS_SUITES) $(CI_NATIVE_SUITES) $(CI_DOC)
 	$(NODE) ./test/embedding/test-embedding.js
 	$(info Clean up any leftover processes, error if found.)
--- a/test/common/index.js
+++ b/test/common/index.js
@@ -251,21 +251,7 @@
 function platformTimeout(ms) {
   const multipliers = typeof ms === 'bigint' ?
     { two: 2n, four: 4n, seven: 7n } : { two: 2, four: 4, seven: 7 };
-
-  if (isDebug)
-    ms = multipliers.two * ms;
-
-  if (exports.isAIX || exports.isIBMi)
-    return multipliers.two * ms; // Default localhost speed is slower on AIX
-
-  if (isPi())
-    return multipliers.two * ms;  // Raspberry Pi devices
-
-  if (isRiscv64) {
-    return multipliers.four * ms;
-  }
-
-  return ms;
+  return multipliers.seven * ms;
 }
 
 const knownGlobals = new Set([
--- a/tools/test.py
+++ b/tools/test.py
@@ -1407,7 +1407,7 @@
       default=False, action="store_true")
   result.add_argument("--cat", help="Print the source of the tests",
       default=False, action="store_true")
-  result.add_argument("--flaky-tests",
+  result.add_argument("--flaky-tests-mode",
       help="Regard tests marked as flaky (run|skip|dontcare|keep_retrying)",
       default="run")
   result.add_argument("--measure-flakiness",
@@ -1416,6 +1416,9 @@
   result.add_argument("--skip-tests",
       help="Tests that should not be executed (comma-separated)",
       default="")
+  result.add_argument("--flaky-tests",
+      help="Tests that are flaky (comma-separated)",
+      default="")
   result.add_argument("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_argument("-j", help="The number of parallel tasks to run, 0=use number of cores",
@@ -1465,6 +1468,7 @@
   options.run = options.run.split(',')
   # Split at commas and filter out all the empty strings.
   options.skip_tests = [test for test in options.skip_tests.split(',') if test]
+  options.flaky_tests = [test for test in options.flaky_tests.split(',') if test]
   if options.run == [""]:
     options.run = None
   elif len(options.run) != 2:
@@ -1492,8 +1496,8 @@
     # -j and ignoring -J, which is the opposite of what we used to do before -J
     # became a legacy no-op.
     print('Warning: Legacy -J option is ignored. Using the -j option.')
-  if options.flaky_tests not in [RUN, SKIP, DONTCARE, KEEP_RETRYING]:
-    print("Unknown flaky-tests mode %s" % options.flaky_tests)
+  if options.flaky_tests_mode not in [RUN, SKIP, DONTCARE, KEEP_RETRYING]:
+    print("Unknown tests mode %s" % options.flaky_tests_mode)
     return False
   return True
 
@@ -1777,11 +1781,13 @@
         sys.exit(1)
 
   def should_keep(case):
+    if any((s in case.file) for s in options.flaky_tests):
+      case.outcomes.add(FLAKY)
     if any((s in case.file) for s in options.skip_tests):
       return False
     elif SKIP in case.outcomes:
       return False
-    elif (options.flaky_tests == SKIP) and (set([SLOW, FLAKY]) & case.outcomes):
+    elif (options.flaky_tests_mode == SKIP) and (set([SLOW, FLAKY]) & case.outcomes):
       return False
     else:
       return True
@@ -1814,7 +1820,7 @@
   else:
     try:
       start = time.time()
-      result = RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests, options.measure_flakiness)
+      result = RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests_mode, options.measure_flakiness)
       exitcode = 0 if result['allPassed'] else 1
       duration = time.time() - start
     except KeyboardInterrupt:
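
For context, and likewise not part of the patch: a self-contained sketch of the
net effect of the should_keep() change above. The Case class and the SKIP/SLOW/
FLAKY constants are stand-ins for the runner's real objects (their exact
definitions live in tools/test.py); the file names are illustrative.

    # Sketch of the patched should_keep() filtering with stand-in objects.
    SKIP, SLOW, FLAKY = "skip", "slow", "flaky"

    class Case:
        def __init__(self, file, outcomes=()):
            self.file = file               # path of the test file
            self.outcomes = set(outcomes)  # outcomes recorded for the case

    def should_keep(case, skip_tests, flaky_tests, flaky_tests_mode):
        # New in this patch: any case whose path matches an entry in
        # --flaky-tests is marked flaky before the usual filtering runs.
        if any(s in case.file for s in flaky_tests):
            case.outcomes.add(FLAKY)
        if any(s in case.file for s in skip_tests):
            return False
        if SKIP in case.outcomes:
            return False
        # Renamed option: the mode (run|skip|dontcare|keep_retrying) decides
        # whether slow/flaky cases are dropped up front.
        if flaky_tests_mode == SKIP and ({SLOW, FLAKY} & case.outcomes):
            return False
        return True

    # With --flaky-tests=test-dgram and --flaky-tests-mode=skip the matching
    # case is filtered out; with the default mode "run" it is kept but flagged.
    case = Case("test/parallel/test-dgram-send.js")
    print(should_keep(case, skip_tests=[], flaky_tests=["test-dgram"],
                      flaky_tests_mode="skip"))  # False
    print(FLAKY in case.outcomes)                # True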