1 #
2 # Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
3 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 #
5 # This code is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License version 2 only, as
7 # published by the Free Software Foundation. Oracle designates this
8 # particular file as subject to the "Classpath" exception as provided
9 # by Oracle in the LICENSE file that accompanied this code.
10 #
11 # This code is distributed in the hope that it will be useful, but WITHOUT
12 # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 # version 2 for more details (a copy is included in the LICENSE file that
15 # accompanied this code).
16 #
17 # You should have received a copy of the GNU General Public License version
18 # 2 along with this work; if not, write to the Free Software Foundation,
19 # Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 #
21 # Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 # or visit www.oracle.com if you need additional information or have any
23 # questions.
24 #
25
26 include MakeFileStart.gmk
27
28 ################################################################################
29
30 include FindTests.gmk
31
32 # We will always run multiple tests serially
33 .NOTPARALLEL:
34
35 ################################################################################
36 # Parse global control variables
37 ################################################################################
38
39 ifneq ($(TEST_VM_OPTS), )
40 ifneq ($(TEST_OPTS), )
41 TEST_OPTS := $(TEST_OPTS);VM_OPTIONS=$(TEST_VM_OPTS)
42 else
43 TEST_OPTS := VM_OPTIONS=$(TEST_VM_OPTS)
44 endif
45 endif
46
47 $(eval $(call ParseKeywordVariable, TEST_OPTS, \
48 SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR JCOV JCOV_DIFF_CHANGESET AOT_JDK, \
49 STRING_KEYWORDS := VM_OPTIONS JAVA_OPTIONS, \
50 ))
51
52 # Helper function to propagate TEST_OPTS values.
53 #
54 # Note: No spaces are allowed around the arguments.
55 # Arg $1 The variable in TEST_OPTS to propagate
56 # Arg $2 The control variable to propagate it to
57 define SetTestOpt
58 ifneq ($$(TEST_OPTS_$1), )
59 $2_$1 := $$(TEST_OPTS_$1)
60 endif
61 endef
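# Illustrative example (the values below are made up): invoking make with
#   TEST_OPTS="JOBS=4;VM_OPTIONS=-Xmx1g"
# is parsed above into TEST_OPTS_JOBS=4 and TEST_OPTS_VM_OPTIONS=-Xmx1g, and
# $(eval $(call SetTestOpt,JOBS,JTREG)) would then propagate it as
# JTREG_JOBS := 4.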
62
63 # Setup _NT_SYMBOL_PATH on Windows, pointing it to our pdb files.

64 ifeq ($(call isTargetOs, windows), true)
65 ifndef _NT_SYMBOL_PATH
66 SYMBOL_PATH := $(call PathList, $(sort $(patsubst %/, %, $(dir $(wildcard \
67 $(addprefix $(SYMBOLS_IMAGE_DIR)/bin/, *.pdb */*.pdb))))))
68 export _NT_SYMBOL_PATH := $(subst \\,\, $(call FixPath, \
69 $(subst $(DQUOTE),, $(SYMBOL_PATH))))
70 $(call LogDebug, Setting _NT_SYMBOL_PATH to $(_NT_SYMBOL_PATH))
71 endif
72 endif
73
74 ################################################################################
75
76 # This is the JDK that we will test
77 JDK_UNDER_TEST := $(JDK_IMAGE_DIR)
78
79 TEST_RESULTS_DIR := $(OUTPUTDIR)/test-results
80 TEST_SUPPORT_DIR := $(OUTPUTDIR)/test-support
81 TEST_SUMMARY := $(TEST_RESULTS_DIR)/test-summary.txt
82 TEST_LAST_IDS := $(TEST_SUPPORT_DIR)/test-last-ids.txt
83
84 ifeq ($(CUSTOM_ROOT), )
85 JTREG_TOPDIR := $(TOPDIR)
86 else
87 JTREG_TOPDIR := $(CUSTOM_ROOT)
88 endif
89
90 JTREG_FAILURE_HANDLER_DIR := $(TEST_IMAGE_DIR)/failure_handler
91 JTREG_FAILURE_HANDLER := $(JTREG_FAILURE_HANDLER_DIR)/jtregFailureHandler.jar
92
93 JTREG_TEST_THREAD_FACTORY_DIR := $(TEST_IMAGE_DIR)/jtreg_test_thread_factory
94 JTREG_TEST_THREAD_FACTORY_JAR := $(JTREG_TEST_THREAD_FACTORY_DIR)/jtregTestThreadFactory.jar
95
96 JTREG_FAILURE_HANDLER_TIMEOUT ?= 0
97
98 ifneq ($(wildcard $(JTREG_FAILURE_HANDLER)), )
99 JTREG_FAILURE_HANDLER_OPTIONS := \
100 -timeoutHandlerDir:$(JTREG_FAILURE_HANDLER) \
101 -observerDir:$(JTREG_FAILURE_HANDLER) \
102 -timeoutHandler:jdk.test.failurehandler.jtreg.GatherProcessInfoTimeoutHandler \
103 -observer:jdk.test.failurehandler.jtreg.GatherDiagnosticInfoObserver \
104 -timeoutHandlerTimeout:$(JTREG_FAILURE_HANDLER_TIMEOUT) \
105 #
106 endif
107
108 GTEST_LAUNCHER_DIRS := $(patsubst %/gtestLauncher$(EXECUTABLE_SUFFIX), %, \
109 $(wildcard $(TEST_IMAGE_DIR)/hotspot/gtest/*/gtestLauncher$(EXECUTABLE_SUFFIX)))
110 GTEST_VARIANTS := $(strip $(patsubst $(TEST_IMAGE_DIR)/hotspot/gtest/%, %, \
111 $(GTEST_LAUNCHER_DIRS)))
112
113 COV_ENVIRONMENT :=
114 JTREG_COV_OPTIONS :=
115
116 ifeq ($(TEST_OPTS_JCOV), true)
117 JCOV_OUTPUT_DIR := $(TEST_RESULTS_DIR)/jcov-output
118 JCOV_SUPPORT_DIR := $(TEST_SUPPORT_DIR)/jcov-support
119 JCOV_GRABBER_LOG := $(JCOV_OUTPUT_DIR)/grabber.log
120 JCOV_RESULT_FILE := $(JCOV_OUTPUT_DIR)/result.xml
121 JCOV_REPORT := $(JCOV_OUTPUT_DIR)/report
122 JCOV_MEM_OPTIONS := -Xms64m -Xmx4g
123
124 # Replace our normal test JDK with the JCov image.
125 JDK_UNDER_TEST := $(JCOV_IMAGE_DIR)
126
127 COV_ENVIRONMENT += JAVA_TOOL_OPTIONS="$(JCOV_MEM_OPTIONS)" \
128 _JAVA_OPTIONS="$(JCOV_MEM_OPTIONS)"
129 JTREG_COV_OPTIONS += -e:JAVA_TOOL_OPTIONS='$(JCOV_MEM_OPTIONS)' \
130 -e:_JAVA_OPTIONS='$(JCOV_MEM_OPTIONS)'
131 endif
132
133 ifeq ($(GCOV_ENABLED), true)
134 GCOV_OUTPUT_DIR := $(TEST_RESULTS_DIR)/gcov-output
135 COV_ENVIRONMENT += GCOV_PREFIX="$(GCOV_OUTPUT_DIR)"
136 JTREG_COV_OPTIONS += -e:GCOV_PREFIX="$(GCOV_OUTPUT_DIR)"
137 endif
138
139 ################################################################################
140 # Setup global test running parameters
141 ################################################################################
142
143 # Each factor variable comes in 3 variants. The first one is reserved for users
144 # to use on the command line. The other two are for predefined configurations in JDL
145 # and for machine-specific configurations, respectively.
146 TEST_JOBS_FACTOR ?= 1
147 TEST_JOBS_FACTOR_JDL ?= 1
148 TEST_JOBS_FACTOR_MACHINE ?= 1
149
150 ifeq ($(TEST_JOBS), 0)
151 CORES_DIVIDER := 2
152 # For some big multi-core machines with a low ulimit -u setting we hit the max
153 # threads/process limit. In such a setup the memory/cores-only-guided
154 # TEST_JOBS config is insufficient. From experience a concurrency setting of
155 # 14 works reasonably well for low ulimit values (<= 4096). Thus, use
156 # divider 4096/14. For high ulimit -u values this shouldn't make a difference.
157 ULIMIT_DIVIDER := (4096/14)
158 PROC_ULIMIT := -1
159 ifneq ($(OPENJDK_TARGET_OS), windows)
160 PROC_ULIMIT := $(shell $(ULIMIT) -u)
161 ifeq ($(PROC_ULIMIT), unlimited)
162 PROC_ULIMIT := -1
163 endif
164 endif
165 MEMORY_DIVIDER := 2048
166 TEST_JOBS := $(shell $(AWK) \
167 'BEGIN { \
168 c = $(NUM_CORES) / $(CORES_DIVIDER); \
169 m = $(MEMORY_SIZE) / $(MEMORY_DIVIDER); \
170 u = $(PROC_ULIMIT); \
171 if (u > -1) { \
172 u = u / $(ULIMIT_DIVIDER); \
173 if (u < c) c = u; \
174 } \
175 if (c > m) c = m; \
176 c = c * $(TEST_JOBS_FACTOR); \
177 c = c * $(TEST_JOBS_FACTOR_JDL); \
178 c = c * $(TEST_JOBS_FACTOR_MACHINE); \
179 if (c < 1) c = 1; \
180 c = c + 0.5; \
181 printf "%d", c; \
182 }')
183 endif
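# Illustrative example (made-up machine): with NUM_CORES=32, MEMORY_SIZE=65536,
# an unlimited process ulimit and all factors left at 1, the awk script above
# computes min(32 / 2, 65536 / 2048) = 16, so TEST_JOBS becomes 16.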
184
185 ################################################################################
186 # Parse control variables
187 ################################################################################
188
189 ifneq ($(TEST_OPTS), )
190 # Inform the user
191 $(info Running tests using TEST_OPTS control variable '$(TEST_OPTS)')
192 endif
193
194 ### Jtreg
195
196 $(eval $(call SetTestOpt,VM_OPTIONS,JTREG))
197 $(eval $(call SetTestOpt,JAVA_OPTIONS,JTREG))
198
199 $(eval $(call SetTestOpt,JOBS,JTREG))
200 $(eval $(call SetTestOpt,TIMEOUT_FACTOR,JTREG))
201 $(eval $(call SetTestOpt,FAILURE_HANDLER_TIMEOUT,JTREG))
202 $(eval $(call SetTestOpt,REPORT,JTREG))
203 $(eval $(call SetTestOpt,AOT_JDK,JTREG))
204
205 $(eval $(call ParseKeywordVariable, JTREG, \
206 SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR FAILURE_HANDLER_TIMEOUT \
207 TEST_MODE ASSERT VERBOSE RETAIN TEST_THREAD_FACTORY JVMTI_STRESS_AGENT \
208 MAX_MEM RUN_PROBLEM_LISTS RETRY_COUNT REPEAT_COUNT MAX_OUTPUT REPORT \
209 AOT_JDK MANUAL $(CUSTOM_JTREG_SINGLE_KEYWORDS), \
210 STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS KEYWORDS \
211 EXTRA_PROBLEM_LISTS LAUNCHER_OPTIONS \
212 $(CUSTOM_JTREG_STRING_KEYWORDS), \
213 ))
214
215 ifneq ($(JTREG), )
216 # Inform the user
217 $(info Running tests using JTREG control variable '$(JTREG)')
218 endif
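# Illustrative example (made-up values): a typical invocation could look like
#   make test TEST=tier1 JTREG="JOBS=4;TIMEOUT_FACTOR=8;VM_OPTIONS=-XX:+UseZGC"
# which the parsing above turns into JTREG_JOBS, JTREG_TIMEOUT_FACTOR and
# JTREG_VM_OPTIONS for the jtreg runs below.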
219
220 ### Gtest
221
222 $(eval $(call SetTestOpt,VM_OPTIONS,GTEST))
223 $(eval $(call SetTestOpt,JAVA_OPTIONS,GTEST))
224
225 $(eval $(call ParseKeywordVariable, GTEST, \
226 SINGLE_KEYWORDS := REPEAT, \
227 STRING_KEYWORDS := OPTIONS VM_OPTIONS JAVA_OPTIONS, \
228 ))
229
230 ifneq ($(GTEST), )
231 # Inform the user
232 $(info Running tests using GTEST control variable '$(GTEST)')
233 endif
234
235 ### Microbenchmarks
236
237 $(eval $(call SetTestOpt,VM_OPTIONS,MICRO))
238 $(eval $(call SetTestOpt,JAVA_OPTIONS,MICRO))
239
240 $(eval $(call ParseKeywordVariable, MICRO, \
241 SINGLE_KEYWORDS := ITER FORK TIME WARMUP_ITER WARMUP_TIME, \
242 STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS RESULTS_FORMAT TEST_JDK \
243 BENCHMARKS_JAR, \
244 ))
245
246 ifneq ($(MICRO), )
247 # Inform the user
248 $(info Running tests using MICRO control variable '$(MICRO)')
249 endif
250
251
252 ################################################################################
253 # Component-specific Jtreg settings
254 ################################################################################
255
256 hotspot_JTREG_MAX_MEM := 0
257 hotspot_JTREG_ASSERT := false
258 hotspot_JTREG_NATIVEPATH := $(TEST_IMAGE_DIR)/hotspot/jtreg/native
259 jdk_JTREG_NATIVEPATH := $(TEST_IMAGE_DIR)/jdk/jtreg/native
260 lib-test_JTREG_NATIVEPATH := $(TEST_IMAGE_DIR)/lib-test/jtreg/native
261
262 jdk_JTREG_PROBLEM_LIST += $(TOPDIR)/test/jdk/ProblemList.txt
263 jaxp_JTREG_PROBLEM_LIST += $(TOPDIR)/test/jaxp/ProblemList.txt
264 langtools_JTREG_PROBLEM_LIST += $(TOPDIR)/test/langtools/ProblemList.txt
265 hotspot_JTREG_PROBLEM_LIST += $(TOPDIR)/test/hotspot/jtreg/ProblemList.txt
266 lib-test_JTREG_PROBLEM_LIST += $(TOPDIR)/test/lib-test/ProblemList.txt
267 docs_JTREG_PROBLEM_LIST += $(TOPDIR)/test/docs/ProblemList.txt
268
269 ################################################################################
270 # Parse test selection
271 #
272 # The user has given a test selection in the TEST variable. We must parse it
273 # and determine what that means in terms of actual calls to the test framework.
274 #
275 # The parse functions take as argument a test specification as given by the
276 # user, and return a fully qualified test descriptor if it was a match, or
277 # nothing if not. A single test specification can result in multiple test
278 # descriptors being returned. A valid test descriptor must always be accepted
279 # and returned identically.
280 ################################################################################
281
282 # Helper function to determine if a test specification is a Gtest test
283 #
284 # It is a Gtest test if it is either "gtest", or "gtest:" followed by an optional
285 # test filter string, and an optional "/<variant>" to select a specific JVM
286 # variant. If no variant is specified, all found variants are tested.
287 define ParseGtestTestSelection
288 $(if $(filter gtest%, $1), \
289 $(if $(filter gtest, $1), \
290 $(addprefix gtest:all/, $(GTEST_VARIANTS)) \
291 , \
292 $(if $(strip $(or $(filter gtest/%, $1) $(filter gtest:/%, $1))), \
293 $(patsubst gtest:/%, gtest:all/%, $(patsubst gtest/%, gtest:/%, $1)) \
294 , \
295 $(if $(filter gtest:%, $1), \
296 $(if $(findstring /, $1), \
297 $1 \
298 , \
299 $(addprefix $1/, $(GTEST_VARIANTS)) \
300 ) \
301 ) \
302 ) \
303 ) \
304 )
305 endef
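# Illustrative examples, assuming the test image provides 'server' and 'client'
# gtest variants:
#   gtest                -> gtest:all/server gtest:all/client
#   gtest:os             -> gtest:os/server gtest:os/client
#   gtest:os.test/server -> gtest:os.test/server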
306
307 # Helper function to determine if a test specification is a microbenchmark test
308 #
309 # It is a microbenchmark test if it is either "micro", or "micro:" followed by
310 # an optional test filter string.
311 define ParseMicroTestSelection
312 $(if $(filter micro%, $1), \
313 $(if $(filter micro, $1), \
314 micro:all \
315 , \
316 $(if $(filter micro:, $1), \
317 micro:all \
318 , \
319 $1 \
320 ) \
321 ) \
322 )
323 endef
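# Illustrative examples:
#   micro                  -> micro:all
#   micro:java.lang.invoke -> micro:java.lang.invoke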
324
325 # Helper function that removes the TOPDIR part
326 CleanupJtregPath = \
327 $(strip $(patsubst %/, %, $(subst $(JTREG_TOPDIR)/,, $1)))
328
329 # Take a partial Jtreg root path and return a full, absolute path to that Jtreg
330 # root. Also support having "hotspot" as an alias for "hotspot/jtreg".
331 ExpandJtregRoot = \
332 $(call CleanupJtregPath, $(wildcard \
333 $(if $(filter /%, $1), \
334 $(if $(wildcard $(strip $1)/TEST.ROOT), \
335 $1 \
336 ) \
337 , \
338 $(filter $(addprefix %, $1), $(JTREG_TESTROOTS) $(addsuffix /, $(JTREG_TESTROOTS))) \
339 $(filter $(addprefix %, $(strip $1)/jtreg), $(JTREG_TESTROOTS) $(addsuffix /, $(JTREG_TESTROOTS))) \
340 ) \
341 ))
342
343 # Take a partial Jtreg test path and return a full, absolute path to that Jtreg
344 # test. Also support having "hotspot" as an alias for "hotspot/jtreg".
345 ExpandJtregPath = \
346 $(if $(call ExpandJtregRoot, $1), \
347 $(call ExpandJtregRoot, $1) \
348 , \
349 $(call CleanupJtregPath, $(wildcard \
350 $(if $(filter /%, $1), \
351 $1 \
352 , \
353 $(addsuffix /$(strip $1), $(JTREG_TESTROOTS) $(TEST_BASEDIRS)) \
354 $(addsuffix $(strip $(patsubst hotspot/%, /hotspot/jtreg/%, $1)), $(JTREG_TESTROOTS) $(TEST_BASEDIRS)) \
355 ) \
356 )) \
357 )
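# Illustrative examples, assuming the standard test roots under the top
# directory (results are shown relative to the jtreg top directory):
#   ExpandJtregRoot(hotspot)       -> test/hotspot/jtreg
#   ExpandJtregRoot(jdk)           -> test/jdk
#   ExpandJtregPath(jdk/java/lang) -> test/jdk/java/lang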
358
359 # with test id: dir/Test.java#selection -> Test.java#selection -> .java#selection -> #selection
360 # without: dir/Test.java -> Test.java -> .java -> <<empty string>>
361 TestID = \
362 $(subst .jasm,,$(subst .sh,,$(subst .html,,$(subst .java,,$(suffix $(notdir $1))))))
363
364 # The test id starting with a hash (#testid) will be stripped by all
365 # evals in ParseJtregTestSelectionInner and will be reinserted by calling
366 # TestID (if it is present).
367 ParseJtregTestSelection = \
368 $(call IfAppend, $(call ParseJtregTestSelectionInner, $1), $(call TestID, $1))
369
370 # Helper function to determine if a test specification is a Jtreg test
371 #
372 # It is a Jtreg test if it optionally begins with jtreg:, and then is either
373 # an unspecified group name (possibly prefixed by :), or a group in a
374 # specified test root, or a path to a test or test directory,
375 # either absolute or relative to any of the TEST_BASEDIRS or test roots.
376 define ParseJtregTestSelectionInner
377 $(eval TEST_NAME := $(strip $(patsubst jtreg:%, %, $1))) \
378 $(if $(or $(findstring :, $(TEST_NAME)), $(findstring /, $(TEST_NAME))), , \
379 $(eval TEST_NAME := :$(TEST_NAME)) \
380 ) \
381 $(if $(findstring :, $(TEST_NAME)), \
382 $(if $(filter :%, $(TEST_NAME)), \
383 $(eval TEST_GROUP := $(patsubst :%, %, $(TEST_NAME))) \
384 $(eval TEST_ROOTS := $(foreach test_root, $(JTREG_TESTROOTS), \
385 $(call CleanupJtregPath, $(test_root)))) \
386 , \
387 $(eval TEST_PATH := $(word 1, $(subst :, $(SPACE), $(TEST_NAME)))) \
388 $(eval TEST_GROUP := $(word 2, $(subst :, $(SPACE), $(TEST_NAME)))) \
389 $(eval TEST_ROOTS := $(call ExpandJtregRoot, $(TEST_PATH))) \
390 ) \
391 $(foreach test_root, $(TEST_ROOTS), \
392 $(if $(filter /%, $(test_root)), \
393 jtreg:$(test_root):$(TEST_GROUP) \
394 , \
395 $(if $(filter $(TEST_GROUP), $($(JTREG_TOPDIR)/$(test_root)_JTREG_TEST_GROUPS)), \
396 jtreg:$(test_root):$(TEST_GROUP) \
397 ) \
398 ) \
399 ) \
400 , \
401 $(eval TEST_PATHS := $(call ExpandJtregPath, $(TEST_NAME))) \
402 $(foreach test_path, $(TEST_PATHS), \
403 jtreg:$(test_path) \
404 ) \
405 )
406 endef
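# Illustrative examples (the group names are assumptions about the test roots):
#   tier1           -> jtreg:test/hotspot/jtreg:tier1 jtreg:test/jdk:tier1 ...
#                      (one descriptor per test root that defines a tier1 group)
#   jtreg:jdk:tier2 -> jtreg:test/jdk:tier2
#   jdk/java/lang   -> jtreg:test/jdk/java/lang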
407
408 # Helper function to determine if a test specification is a special test
409 #
410 # It is a special test if it is "special:" followed by a test name,
411 # if it is "make:" or "make-" followed by a make test, or any of the special
412 # test names as a single word.
413 define ParseSpecialTestSelection
414 $(if $(filter special:%, $1), \
415 $1 \
416 ) \
417 $(if $(filter make%, $1), \
418 $(if $(filter make:%, $1), \
419 special:$(strip $1) \
420 ) \
421 $(if $(filter make-%, $1), \
422 special:$(patsubst make-%,make:%, $1) \
423 ) \
424 $(if $(filter make, $1), \
425 special:make:all \
426 )
427 ) \
428 $(if $(filter failure-handler, $1), \
429 special:$(strip $1) \
430 )
431 endef
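# Illustrative examples ("foo" is a hypothetical TestMake.gmk target):
#   failure-handler -> special:failure-handler
#   make            -> special:make:all
#   make-foo        -> special:make:foo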
432
433 ifeq ($(TEST), )
434 $(info No test selection given in TEST!)
435 $(info Please use e.g. 'make test TEST=tier1' or 'make test-tier1')
436 $(info See doc/testing.[md|html] for help)
437 $(error Cannot continue)
438 endif
439
440 ParseTestSelection = \
441 $(strip $(or \
442 $(call ParseCustomTestSelection, $1) \
443 $(call ParseGtestTestSelection, $1) \
444 $(call ParseMicroTestSelection, $1) \
445 $(call ParseJtregTestSelection, $1) \
446 $(call ParseSpecialTestSelection, $1) \
447 ))
448
449 # Now intelligently convert the test selection given by the user in TEST
450 # into a list of fully qualified test descriptors of the tests to run.
451 TESTS_TO_RUN := $(strip $(foreach test, $(TEST), $(call ParseTestSelection, $(test))))
452 UNKNOWN_TEST := $(strip $(foreach test, $(TEST), $(if $(call ParseTestSelection, $(test)), , $(test))))
453
454 ifneq ($(UNKNOWN_TEST), )
455 $(info Unknown test selection: '$(UNKNOWN_TEST)')
456 $(info See doc/testing.[md|html] for help)
457 $(error Cannot continue)
458 endif
459
460 # Present the result of our parsing to the user
461 $(info Test selection '$(TEST)', will run:)
462 $(foreach test, $(TESTS_TO_RUN), $(info * $(test)))
463
464
465 ################################################################################
466 # Functions for setting up rules for running the selected tests
467 #
468 # The SetupRun*Test functions all have the same interface:
469 #
470 # Parameter 1 is the name of the rule. This is the test id, based on the test
471 # descriptor. It is also used as a variable prefix, and the targets
472 # generated are listed in a variable by that name.
473 #
474 # Remaining parameters are named arguments. Currently this is only:
475 # TEST -- The properly formatted fully qualified test descriptor
476 #
477 # After the rule named by the test id has been executed, the following
478 # variables will be available:
479 # testid_TOTAL - the total number of tests run
480 # testid_PASSED - the number of successful tests
481 # testid_FAILED - the number of failed tests
482 # testid_ERROR - the number of tests that were neither successful nor failed
483 #
484 ################################################################################
485
486 ### Rules for Gtest
487
488 SetupRunGtestTest = $(NamedParamsMacroTemplate)
489 define SetupRunGtestTestBody
490 $1_TEST_RESULTS_DIR := $$(TEST_RESULTS_DIR)/$1
491 $1_TEST_SUPPORT_DIR := $$(TEST_SUPPORT_DIR)/$1
492 $1_EXITCODE := $$($1_TEST_RESULTS_DIR)/exitcode.txt
493
494 $1_VARIANT := $$(lastword $$(subst /, , $$($1_TEST)))
495 ifeq ($$(filter $$($1_VARIANT), $$(GTEST_VARIANTS)), )
496 $$(error Invalid gtest variant '$$($1_VARIANT)'. Valid variants: $$(GTEST_VARIANTS))
497 endif
498 $1_TEST_NAME := $$(strip $$(patsubst %/$$($1_VARIANT), %, \
499 $$(patsubst gtest:%, %, $$($1_TEST))))
500 ifneq ($$($1_TEST_NAME), all)
501 $1_GTEST_FILTER := --gtest_filter=$$($1_TEST_NAME)*
502 endif
503
504 ifneq ($$(GTEST_REPEAT), )
505 $1_GTEST_REPEAT := --gtest_repeat=$$(GTEST_REPEAT)
506 endif
507
508 run-test-$1: pre-run-test
509 $$(call LogWarn)
510 $$(call LogWarn, Running test '$$($1_TEST)')
511 $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
512 $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/gtest, \
513 $$(CD) $$($1_TEST_SUPPORT_DIR) && \
514 $$(FIXPATH) $$(TEST_IMAGE_DIR)/hotspot/gtest/$$($1_VARIANT)/gtestLauncher \
515 -jdk $(JDK_UNDER_TEST) $$($1_GTEST_FILTER) \
516 --gtest_output=xml:$$($1_TEST_RESULTS_DIR)/gtest.xml \
517 --gtest_catch_exceptions=0 \
518 $$($1_GTEST_REPEAT) $$(GTEST_OPTIONS) $$(GTEST_VM_OPTIONS) \
519 $$(GTEST_JAVA_OPTIONS) \
520 > >($(TEE) $$($1_TEST_RESULTS_DIR)/gtest.txt) \
521 && $$(ECHO) $$$$? > $$($1_EXITCODE) \
522 || $$(ECHO) $$$$? > $$($1_EXITCODE) \
523 )
524
525 $1_RESULT_FILE := $$($1_TEST_RESULTS_DIR)/gtest.txt
526
527 parse-test-$1: run-test-$1
528 $$(call LogWarn, Finished running test '$$($1_TEST)')
529 $$(call LogWarn, Test report is stored in $$(strip \
530 $$(subst $$(TOPDIR)/, , $$($1_TEST_RESULTS_DIR))))
531 $$(if $$(wildcard $$($1_RESULT_FILE)), \
532 $$(eval $1_RUN := $$(shell $$(AWK) \
533 '/==========.* tests? from .* test (cases?|suites?) ran/ { print $$$$2 }' \
534 $$($1_RESULT_FILE))) \
535 $$(if $$($1_RUN), , $$(eval $1_RUN := 0)) \
536 $$(eval $1_PASSED := $$(shell $$(AWK) '/\[ PASSED \] .* tests?./ \
537 { print $$$$4 }' $$($1_RESULT_FILE))) \
538 $$(if $$($1_PASSED), , $$(eval $1_PASSED := 0)) \
539 $$(eval $1_GTEST_DISABLED := $$(shell $$(AWK) '/YOU HAVE .* DISABLED TEST/ \
540 { print $$$$3 }' $$($1_RESULT_FILE))) \
541 $$(if $$($1_GTEST_DISABLED), , $$(eval $1_GTEST_DISABLED := 0)) \
542 $$(eval $1_GTEST_SKIPPED := $$(shell $$(AWK) '/\[ SKIPPED \] .* tests?.*/ \
543 { print $$$$4 }' $$($1_RESULT_FILE))) \
544 $$(if $$($1_GTEST_SKIPPED), , $$(eval $1_GTEST_SKIPPED := 0)) \
545 $$(eval $1_SKIPPED := $$(shell \
546 $$(EXPR) $$($1_GTEST_DISABLED) + $$($1_GTEST_SKIPPED))) \
547 $$(eval $1_FAILED := $$(shell $$(AWK) '/\[ FAILED \] .* tests?, \
548 listed below/ { print $$$$4 }' $$($1_RESULT_FILE))) \
549 $$(if $$($1_FAILED), , $$(eval $1_FAILED := 0)) \
550 $$(eval $1_ERROR := $$(shell \
551 $$(EXPR) $$($1_RUN) - $$($1_PASSED) - $$($1_FAILED) - $$($1_GTEST_SKIPPED))) \
552 $$(eval $1_TOTAL := $$(shell \
553 $$(EXPR) $$($1_RUN) + $$($1_GTEST_DISABLED))) \
554 , \
555 $$(eval $1_PASSED := 0) \
556 $$(eval $1_FAILED := 0) \
557 $$(eval $1_ERROR := 1) \
558 $$(eval $1_SKIPPED := 0) \
559 $$(eval $1_TOTAL := 1) \
560 )
561
562 $1: run-test-$1 parse-test-$1
563
564 TARGETS += $1 run-test-$1 parse-test-$1
565 TEST_TARGETS += parse-test-$1
566
567 endef
568
569 ################################################################################
570
571 ### Rules for Microbenchmarks
572
573 # Helper function for SetupRunMicroTest. Set a MICRO_* variable from, in order:
574 # 1) Specified by user on command line
575 # 2) Generic default
576 #
577 # Note: No spaces are allowed around the arguments.
578 # Arg $1 The test ID (i.e. $1 in SetupRunMicroTest)
579 # Arg $2 Base variable, e.g. MICRO_TEST_JDK
580 # Arg $3 The default value (optional)
581 define SetMicroValue
582 ifneq ($$($2), )
583 $1_$2 := $$($2)
584 else
585 ifneq ($3, )
586 $1_$2 := $3
587 else
588 $1_$2 :=
589 endif
590 endif
591 endef
592
593 SetupRunMicroTest = $(NamedParamsMacroTemplate)
594 define SetupRunMicroTestBody
595 $1_TEST_RESULTS_DIR := $$(TEST_RESULTS_DIR)/$1
596 $1_TEST_SUPPORT_DIR := $$(TEST_SUPPORT_DIR)/$1
597 $1_EXITCODE := $$($1_TEST_RESULTS_DIR)/exitcode.txt
598
599 $1_TEST_NAME := $$(strip $$(patsubst micro:%, %, $$($1_TEST)))
600
601 $$(eval $$(call SetMicroValue,$1,MICRO_BENCHMARKS_JAR,$$(TEST_IMAGE_DIR)/micro/benchmarks.jar))
602 $$(eval $$(call SetMicroValue,$1,MICRO_TEST_JDK,$$(JDK_UNDER_TEST)))
603 $$(eval $$(call SetMicroValue,$1,MICRO_JAVA_OPTIONS))
604
605 # Current tests need to open java.io
606 $1_MICRO_JAVA_OPTIONS += --add-opens=java.base/java.io=ALL-UNNAMED
607
608 # Save output as JSON or CSV file
609 ifneq ($$(MICRO_RESULTS_FORMAT), )
610 $1_MICRO_BASIC_OPTIONS += -rf $$(MICRO_RESULTS_FORMAT)
611 $1_MICRO_BASIC_OPTIONS += -rff $$($1_TEST_RESULTS_DIR)/jmh-result.$(MICRO_RESULTS_FORMAT)
612 endif
613
614 # Set library path for native dependencies
615 $1_JMH_JVM_ARGS := -Djava.library.path=$$(TEST_IMAGE_DIR)/micro/native
616
617 ifneq ($$(MICRO_VM_OPTIONS)$$(MICRO_JAVA_OPTIONS), )
618 $1_JMH_JVM_ARGS += $$(MICRO_VM_OPTIONS) $$(MICRO_JAVA_OPTIONS)
619 endif
620
621 $1_MICRO_VM_OPTIONS := -jvmArgsPrepend $(call ShellQuote,$$($1_JMH_JVM_ARGS))
622
623 ifneq ($$(MICRO_ITER), )
624 $1_MICRO_ITER := -i $$(MICRO_ITER)
625 endif
626 ifneq ($$(MICRO_FORK), )
627 $1_MICRO_FORK := -f $$(MICRO_FORK)
628 endif
629 ifneq ($$(MICRO_TIME), )
630 $1_MICRO_TIME := -r $$(MICRO_TIME)
631 endif
632 ifneq ($$(MICRO_WARMUP_ITER), )
633 $1_MICRO_WARMUP_ITER := -wi $$(MICRO_WARMUP_ITER)
634 endif
635 ifneq ($$(MICRO_WARMUP_TIME), )
636 $1_MICRO_WARMUP_TIME := -w $$(MICRO_WARMUP_TIME)
637 endif
638
639 # Microbenchmarks are executed from the root of the test image directory.
640 # This enables JMH tests to add dependencies using relative paths such as
641 # -Djava.library.path=micro/native
642
643 run-test-$1: pre-run-test
644 $$(call LogWarn)
645 $$(call LogWarn, Running test '$$($1_TEST)')
646 $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
647 $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/micro, \
648 $$(CD) $$(TEST_IMAGE_DIR) && \
649 $$(FIXPATH) $$($1_MICRO_TEST_JDK)/bin/java $$($1_MICRO_JAVA_OPTIONS) \
650 -jar $$($1_MICRO_BENCHMARKS_JAR) \
651 $$($1_MICRO_ITER) $$($1_MICRO_FORK) $$($1_MICRO_TIME) \
652 $$($1_MICRO_WARMUP_ITER) $$($1_MICRO_WARMUP_TIME) \
653 $$($1_MICRO_VM_OPTIONS) $$($1_MICRO_BASIC_OPTIONS) $$(MICRO_OPTIONS) \
654 $$($1_TEST_NAME) \
655 > >($(TEE) $$($1_TEST_RESULTS_DIR)/micro.txt) \
656 && $$(ECHO) $$$$? > $$($1_EXITCODE) \
657 || $$(ECHO) $$$$? > $$($1_EXITCODE) \
658 )
659
660 $1_RESULT_FILE := $$($1_TEST_RESULTS_DIR)/micro.txt
661
662 parse-test-$1: run-test-$1
663 $$(call LogWarn, Finished running test '$$($1_TEST)')
664 $$(call LogWarn, Test report is stored in $$(strip \
665 $$(subst $$(TOPDIR)/, , $$($1_TEST_RESULTS_DIR))))
666 $$(if $$(wildcard $$($1_EXITCODE)), \
667 $$(eval $1_EXIT_CODE := $$(shell $$(CAT) $$($1_EXITCODE))) \
668 $$(if $$(filter 0, $$($1_EXIT_CODE)), \
669 $$(eval $1_PASSED := 1) \
670 $$(eval $1_ERROR := 0) \
671 , \
672 $$(eval $1_PASSED := 0) \
673 $$(eval $1_ERROR := 1) \
674 ) \
675 $$(eval $1_FAILED := 0) \
676 $$(eval $1_TOTAL := $$(shell \
677 $$(EXPR) $$($1_PASSED) + $$($1_ERROR))) \
678 , \
679 $$(eval $1_PASSED := 0) \
680 $$(eval $1_FAILED := 0) \
681 $$(eval $1_ERROR := 1) \
682 $$(eval $1_TOTAL := 1) \
683 )
684 $$(eval $1_SKIPPED := 0)
685
686 $1: run-test-$1 parse-test-$1
687
688 TARGETS += $1 run-test-$1 parse-test-$1
689 TEST_TARGETS += parse-test-$1
690
691 endef
692
693 ################################################################################
694
695 ### Rules for Jtreg
696
697 # Helper function for SetupRunJtregTest. Set a JTREG_* variable from, in order:
698 # 1) Specified by user on command line
699 # 2) Component-specific default
700 # 3) Generic default
701 #
702 # Note: No spaces are allowed around the arguments.
703 # Arg $1 The test ID (i.e. $1 in SetupRunJtregTest)
704 # Arg $2 Base variable, e.g. JTREG_JOBS
705 # Arg $3 The default value (optional)
706 define SetJtregValue
707 ifneq ($$($2), )
708 $1_$2 := $$($2)
709 else
710 ifneq ($$($$($1_COMPONENT)_$2), )
711 $1_$2 := $$($$($1_COMPONENT)_$2)
712 else
713 ifneq ($3, )
714 $1_$2 := $3
715 else
716 $1_$2 :=
717 endif
718 endif
719 endif
720 endef
721
722 ################################################################################
723 # Helper function for creating a customized AOT cache for running tests
724 ################################################################################
725
726 # Parameter 1 is the name of the rule.
727 #
728 # Remaining parameters are named arguments.
729 # TRAINING The AOT training mode: onestep or twostep
730 # VM_OPTIONS List of JVM arguments to use when creating AOT cache
731 #
732 # After calling this, the following variables are defined
733 # $1_AOT_TARGETS List of all targets that the test rule will need to depend on
734 # $1_AOT_JDK_CACHE The AOT cache file to be used to run the test with
735 #
736 SetupAOT = $(NamedParamsMacroTemplate)
737 define SetupAOTBody
738 $1_AOT_JDK_OUTPUT_DIR := $$($1_TEST_SUPPORT_DIR)/aot
739 $1_AOT_JDK_CONF := $$($1_AOT_JDK_OUTPUT_DIR)/jdk.aotconf
740 $1_AOT_JDK_CACHE := $$($1_AOT_JDK_OUTPUT_DIR)/jdk.aotcache
741 $1_AOT_JDK_LOG := $$($1_AOT_JDK_OUTPUT_DIR)/TestSetupAOT.log
742
743 # We execute the training run with the TestSetupAOT class from $(TEST_IMAGE_DIR)/setup_aot/TestSetupAOT.jar
744 # to touch a fair number of classes inside the JDK. Note that we can't specify a classpath,
745 # or else the AOT cache cannot be used with jtreg test cases that use a different value
746 # for their classpaths. Instead, we cd into the $$($1_AOT_JDK_OUTPUT_DIR) directory,
747 # extract the TestSetupAOT.jar there, and run in that directory without specifying a classpath.
748 # The "java" launcher will have an implicit classpath of ".", so it can pick up the TestSetupAOT
749 # class from the JVM's current directory.
750 #
751 # The TestSetupAOT class (or any other classes that are loaded from ".") will be excluded
752 # from the AOT cache, as "." is an unsupported location. As a result, the AOT cache will contain
753 # only classes from the JDK.
754
755 $$($1_AOT_JDK_CACHE): $$(JDK_IMAGE_DIR)/release
756 $$(call MakeDir, $$($1_AOT_JDK_OUTPUT_DIR))
757
758 ifeq ($$($1_TRAINING), onestep)
759
760 $$(call LogWarn, AOT: Create AOT cache $$($1_AOT_JDK_CACHE) in one step with flags: $$($1_VM_OPTIONS)) \
761 $$(call ExecuteWithLog, $$($1_AOT_JDK_OUTPUT_DIR), \
762 cd $$($1_AOT_JDK_OUTPUT_DIR); \
763 $(JAR) --extract --file $(TEST_IMAGE_DIR)/setup_aot/TestSetupAOT.jar; \
764 $$(FIXPATH) $(JDK_UNDER_TEST)/bin/java $$($1_VM_OPTIONS) \
765 -Xlog:class+load$$(COMMA)aot$$(COMMA)aot+class=debug:file=$$($1_AOT_JDK_CACHE).log -Xlog:cds*=error -Xlog:aot*=error \
766 -XX:AOTMode=record -XX:AOTCacheOutput=$$($1_AOT_JDK_CACHE) \
767 TestSetupAOT $$($1_AOT_JDK_OUTPUT_DIR) > $$($1_AOT_JDK_LOG) \
768 )
769
770 else
771
772 $$(call LogWarn, AOT: Create cache configuration) \
773 $$(call ExecuteWithLog, $$($1_AOT_JDK_OUTPUT_DIR), \
774 cd $$($1_AOT_JDK_OUTPUT_DIR); \
775 $(JAR) --extract --file $(TEST_IMAGE_DIR)/setup_aot/TestSetupAOT.jar; \
776 $$(FIXPATH) $(JDK_UNDER_TEST)/bin/java $$($1_VM_OPTIONS) \
777 -Xlog:class+load$$(COMMA)aot$$(COMMA)aot+class=debug:file=$$($1_AOT_JDK_CONF).log -Xlog:cds*=error -Xlog:aot*=error \
778 -XX:AOTMode=record -XX:AOTConfiguration=$$($1_AOT_JDK_CONF) \
779 TestSetupAOT $$($1_AOT_JDK_OUTPUT_DIR) > $$($1_AOT_JDK_LOG) \
780 )
781
782 $$(call LogWarn, AOT: Generate AOT cache $$($1_AOT_JDK_CACHE) with flags: $$($1_VM_OPTIONS))
783 $$(call ExecuteWithLog, $$($1_AOT_JDK_OUTPUT_DIR), \
784 $$(FIXPATH) $(JDK_UNDER_TEST)/bin/java \
785 $$($1_VM_OPTIONS) -Xlog:aot$$(COMMA)aot+class=debug:file=$$($1_AOT_JDK_CACHE).log -Xlog:cds*=error -Xlog:aot*=error \
786 -XX:ExtraSharedClassListFile=$(JDK_UNDER_TEST)/lib/classlist \
787 -XX:AOTMode=create -XX:AOTConfiguration=$$($1_AOT_JDK_CONF) -XX:AOTCache=$$($1_AOT_JDK_CACHE) \
788 )
789
790 endif
791
792 $1_AOT_TARGETS += $$($1_AOT_JDK_CACHE)
793
794 endef
795
796 SetupRunJtregTest = $(NamedParamsMacroTemplate)
797 define SetupRunJtregTestBody
798 $1_TEST_RESULTS_DIR := $$(TEST_RESULTS_DIR)/$1
799 $1_TEST_SUPPORT_DIR := $$(TEST_SUPPORT_DIR)/$1
800 $1_EXITCODE := $$($1_TEST_RESULTS_DIR)/exitcode.txt
801
802 $1_TEST_NAME := $$(strip $$(patsubst jtreg:%, %, $$($1_TEST)))
803
804 $1_TEST_ROOT := \
805 $$(strip $$(foreach root, $$(JTREG_TESTROOTS), \
806 $$(if $$(filter $$(root)%, $$(JTREG_TOPDIR)/$$($1_TEST_NAME)), $$(root)) \
807 ))
808 $1_COMPONENT := $$(lastword $$(subst /, $$(SPACE), $$($1_TEST_ROOT)))
809 # This will work only as long as hotspot is the only component with an additional "jtreg" directory
810 ifeq ($$($1_COMPONENT), jtreg)
811 $1_COMPONENT := hotspot
812 endif
813
814 ifeq ($$(JT_HOME), )
815 $$(info Error: jtreg framework is not found.)
816 $$(info Please run configure using --with-jtreg.)
817 $$(error Cannot continue)
818 endif
819
820 # Unfortunately, we need different defaults for some JTREG values,
821 # depending on what component we're running.
822
823 # Convert JTREG_foo into $1_JTREG_foo with a suitable value.
824 $$(eval $$(call SetJtregValue,$1,JTREG_TEST_MODE,agentvm))
825 $$(eval $$(call SetJtregValue,$1,JTREG_ASSERT,true))
826 $$(eval $$(call SetJtregValue,$1,JTREG_MAX_MEM,768m))
827 $$(eval $$(call SetJtregValue,$1,JTREG_NATIVEPATH))
828 $$(eval $$(call SetJtregValue,$1,JTREG_BASIC_OPTIONS))
829 $$(eval $$(call SetJtregValue,$1,JTREG_PROBLEM_LIST))
830
831 # Only the problem list for the current test root should be used.
832 $1_JTREG_PROBLEM_LIST := $$(filter $$($1_TEST_ROOT)%, $$($1_JTREG_PROBLEM_LIST))
833
834 # Pass along the path to the tidy html checker
835 ifneq ($$(TIDY), )
836 $1_JTREG_BASIC_OPTIONS += -Dtidy=$$(TIDY)
837 endif
838
839 ifneq ($(TEST_JOBS), 0)
840 $$(eval $$(call SetJtregValue,$1,JTREG_JOBS,$$(TEST_JOBS)))
841 else
842 $$(eval $$(call SetJtregValue,$1,JTREG_JOBS,$$(JOBS)))
843 endif
844
845 # Make sure MaxRAMPercentage is set so as not to cause OOM or swapping, since
846 # we may end up with a lot of JVMs
847 $1_JTREG_MAX_RAM_PERCENTAGE := $$(shell $(AWK) 'BEGIN { print 25 / $$($1_JTREG_JOBS); }')
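  # Illustrative example: with 8 concurrent jtreg jobs this evaluates to
  # -XX:MaxRAMPercentage=3.125, i.e. the test JVMs together stay within
  # roughly a quarter of the available RAM.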
848
849 JTREG_VERBOSE ?= fail,error,summary
850 JTREG_RETAIN ?= fail,error
851 JTREG_TEST_THREAD_FACTORY ?=
852 JTREG_RUN_PROBLEM_LISTS ?= false
853 JTREG_RETRY_COUNT ?= 0
854 JTREG_REPEAT_COUNT ?= 0
855 JTREG_REPORT ?= files
856 JTREG_AOT_JDK ?= none
857
858 ifneq ($$(JTREG_RETRY_COUNT), 0)
859 ifneq ($$(JTREG_REPEAT_COUNT), 0)
860 $$(info Error: Cannot use both JTREG_RETRY_COUNT and JTREG_REPEAT_COUNT together.)
861 $$(info Please choose one or the other.)
862 $$(error Cannot continue)
863 endif
864 endif
865
866 ifeq ($$(JTREG_RUN_PROBLEM_LISTS), true)
867 JTREG_PROBLEM_LIST_PREFIX := -match:
868 else
869 JTREG_PROBLEM_LIST_PREFIX := -exclude:
870 endif
871
872 ifneq ($$(JTREG_TEST_THREAD_FACTORY), )
873 $1_JTREG_BASIC_OPTIONS += -testThreadFactoryPath:$$(JTREG_TEST_THREAD_FACTORY_JAR)
874 $1_JTREG_BASIC_OPTIONS += -testThreadFactory:$$(JTREG_TEST_THREAD_FACTORY)
875 $1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
876 $$(addprefix $$($1_TEST_ROOT)/, ProblemList-$$(JTREG_TEST_THREAD_FACTORY).txt) \
877 ))
878 endif
879
880 ifneq ($$(JTREG_JVMTI_STRESS_AGENT), )
881 AGENT := $$(LIBRARY_PREFIX)JvmtiStressAgent$$(SHARED_LIBRARY_SUFFIX)=$$(JTREG_JVMTI_STRESS_AGENT)
882 $1_JTREG_BASIC_OPTIONS += -javaoption:'-agentpath:$(TEST_IMAGE_DIR)/hotspot/jtreg/native/$$(AGENT)'
883 $1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
884 $$(addprefix $$($1_TEST_ROOT)/, ProblemList-jvmti-stress-agent.txt) \
885 ))
886 endif
887
888
889 ifneq ($$(JTREG_LAUNCHER_OPTIONS), )
890 $1_JTREG_LAUNCHER_OPTIONS += $$(JTREG_LAUNCHER_OPTIONS)
891 endif
892
893 ifneq ($$(JTREG_MAX_OUTPUT), )
894 $1_JTREG_LAUNCHER_OPTIONS += -Djavatest.maxOutputSize=$$(JTREG_MAX_OUTPUT)
895 endif
896
897 ifneq ($$($1_JTREG_MAX_MEM), 0)
898 $1_JTREG_BASIC_OPTIONS += -vmoption:-Xmx$$($1_JTREG_MAX_MEM)
899 $1_JTREG_LAUNCHER_OPTIONS += -Xmx$$($1_JTREG_MAX_MEM)
900 endif
901
902 # Make sure the tmp dir is normalized as some tests will react badly otherwise
903 $1_TEST_TMP_DIR := $$(abspath $$($1_TEST_SUPPORT_DIR)/tmp)
904
905 # test.boot.jdk is used by some test cases that want to execute a previous
906 # version of the JDK.
907 $1_JTREG_BASIC_OPTIONS += -$$($1_JTREG_TEST_MODE) \
908 -verbose:$$(JTREG_VERBOSE) -retain:$$(JTREG_RETAIN) \
909 -concurrency:$$($1_JTREG_JOBS) \
910 -vmoption:-XX:MaxRAMPercentage=$$($1_JTREG_MAX_RAM_PERCENTAGE) \
911 -vmoption:-Dtest.boot.jdk="$$(BOOT_JDK)" \
912 -vmoption:-Djava.io.tmpdir="$$($1_TEST_TMP_DIR)"
913
914 $1_JTREG_BASIC_OPTIONS += -ignore:quiet
915
916 ifeq ($$(JTREG_MANUAL), true)
917 $1_JTREG_BASIC_OPTIONS += -manual
918 else
919 $1_JTREG_BASIC_OPTIONS += -automatic
920 endif
921
922 # Make it possible to specify the JIB_DATA_DIR for tests using the
923 # JIB Artifact resolver
924 $1_JTREG_BASIC_OPTIONS += -e:JIB_DATA_DIR
925 # If running on Windows, propagate the _NT_SYMBOL_PATH to enable
926 # symbol lookup in hs_err files
927 # The minidumps are disabled by default on client Windows, so enable them
928 ifeq ($$(call isTargetOs, windows), true)
929 $1_JTREG_BASIC_OPTIONS += -e:_NT_SYMBOL_PATH
930 $1_JTREG_BASIC_OPTIONS += -vmoption:-XX:+CreateCoredumpOnCrash
931 else ifeq ($$(call isTargetOs, linux), true)
932 $1_JTREG_BASIC_OPTIONS += -e:_JVM_DWARF_PATH=$$(SYMBOLS_IMAGE_DIR)
933 endif
934
935 $1_JTREG_BASIC_OPTIONS += \
936 $$(addprefix -javaoption:, $$(JTREG_JAVA_OPTIONS)) \
937 $$(addprefix -vmoption:, $$(JTREG_VM_OPTIONS)) \
938 #
939
940 ifeq ($$($1_JTREG_ASSERT), true)
941 $1_JTREG_BASIC_OPTIONS += -ea -esa
942 endif
943
944 ifneq ($$($1_JTREG_NATIVEPATH), )
945 $1_JTREG_BASIC_OPTIONS += -nativepath:$$($1_JTREG_NATIVEPATH)
946 endif
947
948 ifneq ($$($1_JTREG_PROBLEM_LIST), )
949 $1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$($1_JTREG_PROBLEM_LIST))
950 endif
951
952 JTREG_ALL_OPTIONS := $$(JTREG_JAVA_OPTIONS) $$(JTREG_VM_OPTIONS)
953
954 JTREG_AUTO_PROBLEM_LISTS :=
955 # Please reach consensus before changing this.
956 JTREG_AUTO_TIMEOUT_FACTOR := 4
957
958 ifneq ($$(findstring -Xcomp, $$(JTREG_ALL_OPTIONS)), )
959 JTREG_AUTO_PROBLEM_LISTS += ProblemList-Xcomp.txt
960 JTREG_AUTO_TIMEOUT_FACTOR := 10
961 endif
962
963 ifneq ($$(findstring -XX:+UseZGC, $$(JTREG_ALL_OPTIONS)), )
964 JTREG_AUTO_PROBLEM_LISTS += ProblemList-zgc.txt
965 endif
966
967 ifneq ($$(findstring -XX:+UseShenandoahGC, $$(JTREG_ALL_OPTIONS)), )
968 JTREG_AUTO_PROBLEM_LISTS += ProblemList-shenandoah.txt
969 endif
970
971 ifneq ($$(findstring --enable-preview, $$(JTREG_ALL_OPTIONS)), )
972 JTREG_AUTO_PROBLEM_LISTS += ProblemList-enable-preview.txt
973 endif
974
975 ifneq ($$(findstring -XX:+UseCompactObjectHeaders, $$(JTREG_ALL_OPTIONS)), )
976 JTREG_AUTO_PROBLEM_LISTS += ProblemList-coh.txt
977 endif
978
979
980 ifneq ($$(JTREG_EXTRA_PROBLEM_LISTS), )
981 # Accept both absolute paths and paths relative to the current test root.
982 $1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
983 $$(JTREG_EXTRA_PROBLEM_LISTS) \
984 $$(addprefix $$($1_TEST_ROOT)/, $$(JTREG_EXTRA_PROBLEM_LISTS)) \
985 ))
986 endif
987
988 ifneq ($$(JIB_HOME), )
989 $1_JTREG_BASIC_OPTIONS += -e:JIB_HOME=$$(JIB_HOME)
990 endif
991
992 ifneq ($$(JDK_FOR_COMPILE), )
993 # Allow overriding the JDK used for compilation from the command line
994 $1_JTREG_BASIC_OPTIONS += -compilejdk:$$(JDK_FOR_COMPILE)
995 endif
996
997 $1_JTREG_BASIC_OPTIONS += -e:TEST_IMAGE_DIR=$(TEST_IMAGE_DIR)
998
999 $1_JTREG_BASIC_OPTIONS += -e:DOCS_JDK_IMAGE_DIR=$$(DOCS_JDK_IMAGE_DIR)
1000
1001 ifneq ($$(JTREG_FAILURE_HANDLER_OPTIONS), )
1002 $1_JTREG_LAUNCHER_OPTIONS += -Djava.library.path="$(JTREG_FAILURE_HANDLER_DIR)"
1003 endif
1004
1005 ifneq ($$(JTREG_KEYWORDS), )
1006 # The keywords string may contain problematic characters and may be quoted
1007 # already when it arrives here. Remove any existing quotes and replace them
1008 # with one set of single quotes.
1009 $1_JTREG_KEYWORDS := \
1010 $$(strip $$(subst $$(SQUOTE),,$$(subst $$(DQUOTE),,$$(JTREG_KEYWORDS))))
1011 ifneq ($$($1_JTREG_KEYWORDS), )
1012 $1_JTREG_BASIC_OPTIONS += -k:'$$($1_JTREG_KEYWORDS)'
1013 endif
1014 endif
1015
1016 ifneq ($$(filter $$(JTREG_AOT_JDK), onestep twostep), )
1017 $$(call LogWarn, Add AOT target for $1)
1018 $$(eval $$(call SetupAOT, $1, \
1019 TRAINING := $$(JTREG_AOT_JDK), \
1020 VM_OPTIONS := $$(JTREG_ALL_OPTIONS) ))
1021 $$(call LogWarn, AOT_JDK_CACHE=$$($1_AOT_JDK_CACHE))
1022 $1_JTREG_BASIC_OPTIONS += -vmoption:-XX:AOTCache="$$($1_AOT_JDK_CACHE)"
1023 endif
1024
1025
1026 $$(eval $$(call SetupRunJtregTestCustom, $1))
1027
1028 # SetupRunJtregTestCustom might also adjust the JTREG_AUTO_ variables,
1029 # so set the final values only after the custom setup has been applied
1030 ifneq ($$(JTREG_AUTO_PROBLEM_LISTS), )
1031 # Accept both absolute paths and paths relative to the current test root.
1032 $1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
1033 $$(JTREG_AUTO_PROBLEM_LISTS) \
1034 $$(addprefix $$($1_TEST_ROOT)/, $$(JTREG_AUTO_PROBLEM_LISTS)) \
1035 ))
1036 endif
1037
1038 JTREG_TIMEOUT_FACTOR ?= $$(JTREG_AUTO_TIMEOUT_FACTOR)
1039 $1_JTREG_BASIC_OPTIONS += -timeoutFactor:$$(JTREG_TIMEOUT_FACTOR)
1040
1041 clean-outputdirs-$1:
1042 $$(call LogWarn, Clean up dirs for $1)
1043 $$(RM) -r $$($1_TEST_SUPPORT_DIR)
1044 $$(RM) -r $$($1_TEST_RESULTS_DIR)
1045
1046 $1_COMMAND_LINE := \
1047 $$(JTREG_JAVA) $$($1_JTREG_LAUNCHER_OPTIONS) \
1048 -Dprogram=jtreg -jar $$(JT_HOME)/lib/jtreg.jar \
1049 $$($1_JTREG_BASIC_OPTIONS) \
1050 -testjdk:$$(JDK_UNDER_TEST) \
1051 -dir:$$(JTREG_TOPDIR) \
1052 -reportDir:$$($1_TEST_RESULTS_DIR) \
1053 -workDir:$$($1_TEST_SUPPORT_DIR) \
1054 -report:$${JTREG_REPORT} \
1055 $$$${JTREG_STATUS} \
1056 $$(JTREG_OPTIONS) \
1057 $$(JTREG_FAILURE_HANDLER_OPTIONS) \
1058 $$(JTREG_COV_OPTIONS) \
1059 $$($1_TEST_NAME) \
1060 && $$(ECHO) $$$$? > $$($1_EXITCODE) \
1061 || $$(ECHO) $$$$? > $$($1_EXITCODE)
1062
1063
1064 ifneq ($$(JTREG_RETRY_COUNT), 0)
1065 $1_COMMAND_LINE := \
1066 for i in {0..$$(JTREG_RETRY_COUNT)}; do \
1067 if [ "$$$$i" != 0 ]; then \
1068 $$(ECHO) ""; \
1069 $$(ECHO) "Retrying Jtreg run. Attempt: $$$$i"; \
1070 fi; \
1071 $$($1_COMMAND_LINE); \
1072 if [ "`$$(CAT) $$($1_EXITCODE)`" = "0" ]; then \
1073 break; \
1074 fi; \
1075 export JTREG_STATUS="-status:error,fail"; \
1076 done
1077 endif
1078
1079 ifneq ($$(JTREG_REPEAT_COUNT), 0)
1080 $1_COMMAND_LINE := \
1081 for i in {1..$$(JTREG_REPEAT_COUNT)}; do \
1082 $$(ECHO) ""; \
1083 $$(ECHO) "Repeating Jtreg run: $$$$i out of $$(JTREG_REPEAT_COUNT)"; \
1084 $$($1_COMMAND_LINE); \
1085 if [ "`$$(CAT) $$($1_EXITCODE)`" != "0" ]; then \
1086 $$(ECHO) ""; \
1087 $$(ECHO) "Failures detected, no more repeats."; \
1088 break; \
1089 fi; \
1090 done
1091 endif
1092
1093 run-test-$1: clean-outputdirs-$1 pre-run-test $$($1_AOT_TARGETS)
1094 $$(call LogWarn)
1095 $$(call LogWarn, Running test '$$($1_TEST)')
1096 $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR) \
1097 $$($1_TEST_TMP_DIR))
1098 $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/jtreg, \
1099 $$(COV_ENVIRONMENT) $$($1_COMMAND_LINE) \
1100 )
1101
1102 $1_RESULT_FILE := $$($1_TEST_RESULTS_DIR)/text/stats.txt
1103
1104 parse-test-$1: run-test-$1
1105 $$(call LogWarn, Finished running test '$$($1_TEST)')
1106 $$(call LogWarn, Test report is stored in $$(strip \
1107 $$(subst $$(TOPDIR)/, , $$($1_TEST_RESULTS_DIR))))
1108
1109 # Read the jtreg documentation to learn about the test stats categories:
1110 # https://github.com/openjdk/jtreg/blob/master/src/share/doc/javatest/regtest/faq.md#what-do-all-those-numbers-in-the-test-results-line-mean
1111 # In jtreg, "skipped:" category accounts for tests that threw jtreg.SkippedException at runtime.
1112 # At the same time these tests contribute to "passed:" tests.
1113 # Here we do not want that, so we subtract the number of "skipped:" from "passed:".
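  # Illustrative example (made-up numbers): if stats.txt reports
  # "passed: 120; failed: 2; error: 0; skipped: 5", then PASSED becomes
  # 120 - 5 = 115, while SKIPPED additionally counts excluded and filtered
  # tests, and TOTAL is the sum of PASSED, FAILED, ERROR and SKIPPED.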
1114
1115 $$(if $$(wildcard $$($1_RESULT_FILE)), \
1116 $$(eval $1_PASSED_AND_RUNTIME_SKIPPED := $$(shell $$(AWK) '{ gsub(/[,;]/, ""); \
1117 for (i=1; i<=NF; i++) { if ($$$$i == "passed:") \
1118 print $$$$(i+1) } }' $$($1_RESULT_FILE))) \
1119 $$(if $$($1_PASSED_AND_RUNTIME_SKIPPED), , $$(eval $1_PASSED_AND_RUNTIME_SKIPPED := 0)) \
1120 $$(eval $1_FAILED := $$(shell $$(AWK) '{gsub(/[,;]/, ""); \
1121 for (i=1; i<=NF; i++) { if ($$$$i == "failed:") \
1122 print $$$$(i+1) } }' $$($1_RESULT_FILE))) \
1123 $$(if $$($1_FAILED), , $$(eval $1_FAILED := 0)) \
1124 $$(eval $1_RUNTIME_SKIPPED := $$(shell $$(AWK) '{gsub(/[,;]/, ""); \
1125 for (i=1; i<=NF; i++) { if ($$$$i == "skipped:") \
1126 print $$$$(i+1) } }' $$($1_RESULT_FILE))) \
1127 $$(if $$($1_RUNTIME_SKIPPED), , $$(eval $1_RUNTIME_SKIPPED := 0)) \
1128 $$(eval $1_SKIPPED := $$(shell \
1129 $$(AWK) \
1130 'BEGIN { \
1131 overall_skipped = 0; \
1132 patterns[1] = "skipped"; \
1133 patterns[2] = "excluded"; \
1134 patterns[3] = "not in match-list"; \
1135 patterns[4] = "did not match keywords"; \
1136 patterns[5] = "did not meet module requirements"; \
1137 patterns[6] = "did not meet platform requirements"; \
1138 patterns[7] = "did not match prior status"; \
1139 patterns[8] = "did not meet time-limit requirements"; \
1140 } { \
1141 split($$$$0, arr, ";"); \
1142 for (item in arr) { \
1143 for (p in patterns) { \
1144 if (match(arr[item], patterns[p] ": [0-9]+")) { \
1145 overall_skipped += substr(arr[item], RSTART + length(patterns[p]) + 2, RLENGTH); \
1146 } \
1147 } \
1148 } \
1149 print overall_skipped; \
1150 }' \
1151 $$($1_RESULT_FILE) \
1152 )) \
1153 $$(eval $1_ERROR := $$(shell $$(AWK) '{gsub(/[,;]/, ""); \
1154 for (i=1; i<=NF; i++) { if ($$$$i == "error:") \
1155 print $$$$(i+1) } }' $$($1_RESULT_FILE))) \
1156 $$(if $$($1_ERROR), , $$(eval $1_ERROR := 0)) \
1157 \
1158 $$(eval $1_PASSED := $$(shell \
1159 $$(EXPR) $$($1_PASSED_AND_RUNTIME_SKIPPED) - $$($1_RUNTIME_SKIPPED))) \
1160 $$(eval $1_TOTAL := $$(shell \
1161 $$(EXPR) $$($1_PASSED) + $$($1_FAILED) + $$($1_ERROR) + $$($1_SKIPPED))) \
1162 , \
1163 $$(eval $1_PASSED_AND_RUNTIME_SKIPPED := 0) \
1164 $$(eval $1_PASSED := 0) \
1165 $$(eval $1_RUNTIME_SKIPPED := 0) \
1166 $$(eval $1_SKIPPED := 0) \
1167 $$(eval $1_FAILED := 0) \
1168 $$(eval $1_ERROR := 1) \
1169 $$(eval $1_TOTAL := 1) \
1170 )
1171
1172 $1: run-test-$1 parse-test-$1 clean-outputdirs-$1
1173
1174 TARGETS += $1 run-test-$1 parse-test-$1 clean-outputdirs-$1
1175 TEST_TARGETS += parse-test-$1
1176
1177 endef
1178
1179 ################################################################################
1180
1181 ### Rules for special tests
1182
1183 SetupRunSpecialTest = $(NamedParamsMacroTemplate)
1184 define SetupRunSpecialTestBody
1185 $1_TEST_RESULTS_DIR := $$(TEST_RESULTS_DIR)/$1
1186 $1_TEST_SUPPORT_DIR := $$(TEST_SUPPORT_DIR)/$1
1187 $1_EXITCODE := $$($1_TEST_RESULTS_DIR)/exitcode.txt
1188
1189 $1_FULL_TEST_NAME := $$(strip $$(patsubst special:%, %, $$($1_TEST)))
1190 ifneq ($$(findstring :, $$($1_FULL_TEST_NAME)), )
1191 $1_TEST_NAME := $$(firstword $$(subst :, ,$$($1_FULL_TEST_NAME)))
1192 $1_TEST_ARGS := $$(strip $$(patsubst special:$$($1_TEST_NAME):%, %, $$($1_TEST)))
1193 else
1194 $1_TEST_NAME := $$($1_FULL_TEST_NAME)
1195 $1_TEST_ARGS :=
1196 endif
1197
1198 ifeq ($$($1_TEST_NAME), failure-handler)
1199 ifeq ($(BUILD_FAILURE_HANDLER), true)
1200 $1_TEST_COMMAND_LINE := \
1201 ($(CD) $(TOPDIR)/make/test && $(MAKE) $(MAKE_ARGS) -f \
1202 BuildFailureHandler.gmk test)
1203 else
1204 $$(error Cannot test failure handler if it is not built)
1205 endif
1206 else ifeq ($$($1_TEST_NAME), make)
1207 $1_TEST_COMMAND_LINE := \
1208 ($(CD) $(TOPDIR)/test/make && $(MAKE) $(MAKE_ARGS) -f \
1209 TestMake.gmk $$($1_TEST_ARGS) TEST_SUPPORT_DIR="$$($1_TEST_SUPPORT_DIR)")
1210 else
1211 $$(error Invalid special test specification: $$($1_TEST_NAME))
1212 endif
1213
1214 run-test-$1: pre-run-test
1215 $$(call LogWarn)
1216 $$(call LogWarn, Running test '$$($1_TEST)')
1217 $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
1218 $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/test-execution, \
1219 $$($1_TEST_COMMAND_LINE) \
1220 > >($(TEE) $$($1_TEST_RESULTS_DIR)/test-output.txt) \
1221 && $$(ECHO) $$$$? > $$($1_EXITCODE) \
1222 || $$(ECHO) $$$$? > $$($1_EXITCODE) \
1223 )
1224
1225 # We cannot properly parse the results of the various "special" tests.
1226 parse-test-$1: run-test-$1
1227 $$(call LogWarn, Finished running test '$$($1_TEST)')
1228 $$(call LogWarn, Test report is stored in $$(strip \
1229 $$(subst $$(TOPDIR)/, , $$($1_TEST_RESULTS_DIR))))
1230 $$(call LogWarn, Warning: Special test results are not properly parsed!)
1231 $$(eval $1_PASSED := $$(shell \
1232 if [ `$(CAT) $$($1_EXITCODE)` = "0" ]; then $(ECHO) 1; else $(ECHO) 0; fi \
1233 ))
1234 $$(eval $1_SKIPPED := 0)
1235 $$(eval $1_FAILED := $$(shell \
1236 if [ `$(CAT) $$($1_EXITCODE)` = "0" ]; then $(ECHO) 0; else $(ECHO) 1; fi \
1237 ))
1238 $$(eval $1_ERROR := 0)
1239 $$(eval $1_TOTAL := 1)
1240
1241 $1: run-test-$1 parse-test-$1
1242
1243 TARGETS += $1 run-test-$1 parse-test-$1
1244 TEST_TARGETS += parse-test-$1
1245
1246 endef
1247
1248 ################################################################################
1249 # Setup and execute make rules for all selected tests
1250 ################################################################################
1251
1252 # Helper function to determine which handler to use for the given test
1253 UseGtestTestHandler = \
1254 $(if $(filter gtest:%, $1), true)
1255
1256 UseMicroTestHandler = \
1257 $(if $(filter micro:%, $1), true)
1258
1259 UseJtregTestHandler = \
1260 $(if $(filter jtreg:%, $1), true)
1261
1262 UseSpecialTestHandler = \
1263 $(if $(filter special:%, $1), true)
1264
1265 # Now process each test to run and setup a proper make rule
1266 $(foreach test, $(TESTS_TO_RUN), \
1267 $(eval TEST_ID := $(shell $(ECHO) $(strip $(test)) | \
1268 $(TR) -cs '[a-z][A-Z][0-9]\n' '_')) \
1269 $(eval ALL_TEST_IDS += $(TEST_ID)) \
1270 $(if $(call UseCustomTestHandler, $(test)), \
1271 $(eval $(call SetupRunCustomTest, $(TEST_ID), \
1272 TEST := $(test), \
1273 )) \
1274 ) \
1275 $(if $(call UseGtestTestHandler, $(test)), \
1276 $(eval $(call SetupRunGtestTest, $(TEST_ID), \
1277 TEST := $(test), \
1278 )) \
1279 ) \
1280 $(if $(call UseMicroTestHandler, $(test)), \
1281 $(eval $(call SetupRunMicroTest, $(TEST_ID), \
1282 TEST := $(test), \
1283 )) \
1284 ) \
1285 $(if $(call UseJtregTestHandler, $(test)), \
1286 $(eval $(call SetupRunJtregTest, $(TEST_ID), \
1287 TEST := $(test), \
1288 )) \
1289 ) \
1290 $(if $(call UseSpecialTestHandler, $(test)), \
1291 $(eval $(call SetupRunSpecialTest, $(TEST_ID), \
1292 TEST := $(test), \
1293 )) \
1294 ) \
1295 )
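# Illustrative example: the descriptor jtreg:test/hotspot/jtreg:tier1 gets the
# id jtreg_test_hotspot_jtreg_tier1, since every character outside a-z, A-Z and
# 0-9 is turned into an underscore by the tr invocation above.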
1296
1297 # Sort also removes duplicates, so if there are any we'll get fewer words.
1298 ifneq ($(words $(ALL_TEST_IDS)), $(words $(sort $(ALL_TEST_IDS))))
1299 $(error Duplicate test specification)
1300 endif
1301
1302
1303 ################################################################################
1304 # The main target for RunTests.gmk
1305 ################################################################################
1306
1307 #
1308 # Provide hooks for adding functionality before and after all tests are run.
1309 #
1310
1311 $(call LogInfo, RunTest setup starting)
1312
1313 # This target depends on all actual tests having been run (TEST_TARGETS has been
1314 # populated by the SetupRun*Test functions). If you need to provide a teardown
1315 # hook, you must let it depend on this target.
1316 run-all-tests: $(TEST_TARGETS)
1317 $(call LogInfo, RunTest teardown starting)
1318
1319 # This is an abstract target that will be run before any actual tests. Add your
1320 # target as a dependency to this if you need "setup" type functionality executed
1321 # before all tests.
1322 pre-run-test:
1323 $(call LogInfo, RunTest setup done)
1324
1325 # This is an abstract target that will be run after all actual tests, but before
1326 # the test summary. If you need "teardown" type functionality, add your target
1327 # as a dependency on this, and let the teardown target depend on run-all-tests.
1328 post-run-test: run-all-tests
1329 $(call LogInfo, RunTest teardown done)
1330
1331 #
1332 # Create and print a table of the results of all tests run
1333 #
1334 TEST_FAILURE := false
1335
1336 run-test-report: post-run-test
1337 $(RM) $(TEST_SUMMARY).old 2> /dev/null
1338 $(MV) $(TEST_SUMMARY) $(TEST_SUMMARY).old 2> /dev/null || true
1339 $(RM) $(TEST_LAST_IDS).old 2> /dev/null
1340 $(MV) $(TEST_LAST_IDS) $(TEST_LAST_IDS).old 2> /dev/null || true
1341 $(ECHO) >> $(TEST_SUMMARY) ==============================
1342 $(ECHO) >> $(TEST_SUMMARY) Test summary
1343 $(ECHO) >> $(TEST_SUMMARY) ==============================
1344 $(PRINTF) >> $(TEST_SUMMARY) "%2s %-49s %5s %5s %5s %5s %5s %2s\n" " " \
1345 TEST TOTAL PASS FAIL ERROR SKIP " "
1346 $(foreach test, $(TESTS_TO_RUN), \
1347 $(eval TEST_ID := $(shell $(ECHO) $(strip $(test)) | \
1348 $(TR) -cs '[a-z][A-Z][0-9]\n' '_')) \
1349 $(ECHO) >> $(TEST_LAST_IDS) $(TEST_ID) $(NEWLINE) \
1350 $(eval NAME_PATTERN := $(shell $(ECHO) $(test) | $(TR) -c '\n' '_')) \
1351 $(if $(filter __________________________________________________%, $(NAME_PATTERN)), \
1352 $(eval TEST_NAME := ) \
1353 $(PRINTF) >> $(TEST_SUMMARY) "%2s %-49s\n" " " "$(test)" $(NEWLINE) \
1354 , \
1355 $(eval TEST_NAME := $(test)) \
1356 ) \
1357 $(if $(filter-out 0, $($(TEST_ID)_FAILED) $($(TEST_ID)_ERROR)), \
1358 $(PRINTF) >> $(TEST_SUMMARY) "%2s %-49s %5d %5d %5d %5d %5d %2s\n" \
1359 ">>" "$(TEST_NAME)" $($(TEST_ID)_TOTAL) $($(TEST_ID)_PASSED) \
1360 $($(TEST_ID)_FAILED) $($(TEST_ID)_ERROR) $($(TEST_ID)_SKIPPED) "<<" $(NEWLINE) \
1361 $(eval TEST_FAILURE := true) \
1362 , \
1363 $(PRINTF) >> $(TEST_SUMMARY) "%2s %-49s %5d %5d %5d %5d %5d %2s\n" \
1364 " " "$(TEST_NAME)" $($(TEST_ID)_TOTAL) $($(TEST_ID)_PASSED) \
1365 $($(TEST_ID)_FAILED) $($(TEST_ID)_ERROR) $($(TEST_ID)_SKIPPED) " " $(NEWLINE) \
1366 ) \
1367 )
1368 $(ECHO) >> $(TEST_SUMMARY) ==============================
1369 $(if $(filter true, $(TEST_FAILURE)), \
1370 $(ECHO) >> $(TEST_SUMMARY) TEST FAILURE $(NEWLINE) \
1371 $(MKDIR) -p $(MAKESUPPORT_OUTPUTDIR) $(NEWLINE) \
1372 $(TOUCH) $(MAKESUPPORT_OUTPUTDIR)/exit-with-error \
1373 , \
1374 $(ECHO) >> $(TEST_SUMMARY) TEST SUCCESS \
1375 )
1376 $(ECHO)
1377 $(CAT) $(TEST_SUMMARY)
1378 $(ECHO)
1379
1380 # The main run-test target
1381 run-test: run-test-report
1382
1383 TARGETS += run-all-tests pre-run-test post-run-test run-test-report run-test
1384
1385 ################################################################################
1386 # Setup JCov
1387 ################################################################################
1388
1389 ifeq ($(TEST_OPTS_JCOV), true)
1390
1391 JCOV_VM_OPTS := -Xmx4g -Djdk.xml.totalEntitySizeLimit=0 -Djdk.xml.maxGeneralEntitySizeLimit=0
1392
1393 jcov-do-start-grabber:
1394 $(call MakeDir, $(JCOV_OUTPUT_DIR))
1395 if $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -status 1>/dev/null 2>&1 ; then \
1396 $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -stop -stoptimeout 3600 ; \
1397 fi
1398 $(JAVA) $(JCOV_VM_OPTS) -jar $(JCOV_HOME)/lib/jcov.jar Grabber -v -t \
1399 $(JCOV_IMAGE_DIR)/template.xml -o $(JCOV_RESULT_FILE) \
1400 1>$(JCOV_GRABBER_LOG) 2>&1 &
1401
1402 jcov-start-grabber: jcov-do-start-grabber
1403 $(call LogWarn, Starting JCov Grabber...)
1404 $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -t 600 -wait
1405
1406 jcov-stop-grabber:
1407 $(call LogWarn, Stopping JCov Grabber...)
1408 $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -stop -stoptimeout 3600
1409
1410 JCOV_REPORT_TITLE := JDK code coverage report<br/>
1411 ifneq ($(JCOV_MODULES), )
1412 JCOV_MODULES_FILTER := $(foreach m, $(JCOV_MODULES), -include_module $m)
1413 JCOV_REPORT_TITLE += Included modules: $(JCOV_MODULES)<br>
1414 endif
1415 ifneq ($(JCOV_FILTERS), )
1416 JCOV_REPORT_TITLE += Code filters: $(JCOV_FILTERS)<br>
1417 endif
1418 JCOV_REPORT_TITLE += Tests: $(TEST)
1419
1420 jcov-gen-report: jcov-stop-grabber
1421 $(call LogWarn, Generating JCov report ...)
1422 $(call ExecuteWithLog, $(JCOV_SUPPORT_DIR)/run-jcov-repgen, \
1423 $(JAVA) $(JCOV_VM_OPTS) -jar $(JCOV_HOME)/lib/jcov.jar RepGen -sourcepath \
1424 `$(ECHO) $(TOPDIR)/src/*/share/classes/ | $(TR) ' ' ':'` -fmt html \
1425 $(JCOV_MODULES_FILTER) $(JCOV_FILTERS) \
1426 -mainReportTitle "$(JCOV_REPORT_TITLE)" \
1427 -o $(JCOV_REPORT) $(JCOV_RESULT_FILE))
1428
1429 TARGETS += jcov-do-start-grabber jcov-start-grabber jcov-stop-grabber \
1430 jcov-gen-report
1431
1432 ifneq ($(TEST_OPTS_JCOV_DIFF_CHANGESET), )
1433
1434 JCOV_SOURCE_DIFF := $(JCOV_OUTPUT_DIR)/source_diff
1435 JCOV_DIFF_COVERAGE_REPORT := $(JCOV_OUTPUT_DIR)/diff_coverage_report
1436
1437 ifneq ($(and $(GIT), $(wildcard $(TOPDIR)/.git)), )
1438 DIFF_COMMAND := $(GIT) -C $(TOPDIR) diff $(TEST_OPTS_JCOV_DIFF_CHANGESET) > $(JCOV_SOURCE_DIFF)
1439 else
1440 $(info Error: Must be a git source tree for diff coverage.)
1441 $(error No git source tree.)
1442 endif
1443
1444 jcov-gen-diffcoverage: jcov-stop-grabber
1445 $(call LogWarn, Generating diff coverage with changeset $(TEST_OPTS_JCOV_DIFF_CHANGESET) ... )
1446 $(DIFF_COMMAND)
1447 $(JAVA) $(JCOV_VM_OPTS) -jar $(JCOV_HOME)/lib/jcov.jar \
1448 DiffCoverage -replaceDiff "src/.*/classes/:" -all \
1449 $(JCOV_RESULT_FILE) $(JCOV_SOURCE_DIFF) > \
1450 $(JCOV_DIFF_COVERAGE_REPORT)
1451
1452 TARGETS += jcov-gen-diffcoverage
1453
1454 endif
1455
1456 # Hook this into the framework at appropriate places
1457 pre-run-test: jcov-start-grabber
1458
1459 post-run-test: jcov-gen-report
1460
1461 ifneq ($(TEST_OPTS_JCOV_DIFF_CHANGESET), )
1462
1463 post-run-test: jcov-gen-diffcoverage
1464
1465 endif
1466
1467 jcov-stop-grabber: run-all-tests
1468
1469 endif
1470
1471 ################################################################################
1472
1473 all: run-test
1474
1475 .PHONY: $(TARGETS)
1476
1477 ################################################################################
1478
1479 include MakeFileEnd.gmk