< prev index next >

make/RunTests.gmk

Print this page

        

@@ -1,7 +1,7 @@
 #
-# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 #
 # This code is free software; you can redistribute it and/or modify it
 # under the terms of the GNU General Public License version 2 only, as
 # published by the Free Software Foundation.  Oracle designates this

@@ -43,12 +43,12 @@
     TEST_OPTS := VM_OPTIONS=$(TEST_VM_OPTS)
   endif
 endif
 
 $(eval $(call ParseKeywordVariable, TEST_OPTS, \
-    SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR JCOV, \
-    STRING_KEYWORDS := VM_OPTIONS JAVA_OPTIONS AOT_MODULES, \
+    SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR AOT_MODULES, \
+    STRING_KEYWORDS := VM_OPTIONS JAVA_OPTIONS, \
 ))
 
 # Helper function to propagate TEST_OPTS values.
 #
 # Note: No spaces are allowed around the arguments.

@@ -59,33 +59,30 @@
     $2_$1 := $$(TEST_OPTS_$1)
   endif
 endef
 
 # Setup _NT_SYMBOL_PATH on Windows
-ifeq ($(call isTargetOs, windows), true)
+ifeq ($(OPENJDK_TARGET_OS), windows)
   ifndef _NT_SYMBOL_PATH
     # Can't use PathList here as it adds quotes around the value.
     _NT_SYMBOL_PATH := \
         $(subst $(SPACE),;,$(strip \
             $(foreach p, $(sort $(dir $(wildcard \
                 $(addprefix $(SYMBOLS_IMAGE_DIR)/bin/, *.pdb */*.pdb)))), \
               $(call FixPath, $p) \
             ) \
         ))
     export _NT_SYMBOL_PATH
-    $(call LogDebug, Rewriting _NT_SYMBOL_PATH to $(_NT_SYMBOL_PATH))
+    $(info _NT_SYMBOL_PATH=$(_NT_SYMBOL_PATH))
   endif
 endif
 
 ################################################################################
 # Hook to include the corresponding custom file, if present.
 $(eval $(call IncludeCustomExtension, RunTests.gmk))
 ################################################################################
 
-# This is the JDK that we will test
-JDK_UNDER_TEST := $(JDK_IMAGE_DIR)
-
 TEST_RESULTS_DIR := $(OUTPUTDIR)/test-results
 TEST_SUPPORT_DIR := $(OUTPUTDIR)/test-support
 TEST_SUMMARY := $(TEST_RESULTS_DIR)/test-summary.txt
 TEST_LAST_IDS := $(TEST_SUPPORT_DIR)/test-last-ids.txt
 

@@ -105,39 +102,12 @@
       -timeoutHandler:jdk.test.failurehandler.jtreg.GatherProcessInfoTimeoutHandler \
       -observer:jdk.test.failurehandler.jtreg.GatherDiagnosticInfoObserver \
       -timeoutHandlerTimeout:0
 endif
 
-GTEST_LAUNCHER_DIRS := $(patsubst %/gtestLauncher, %, \
-    $(wildcard $(TEST_IMAGE_DIR)/hotspot/gtest/*/gtestLauncher))
-GTEST_VARIANTS := $(strip $(patsubst $(TEST_IMAGE_DIR)/hotspot/gtest/%, %, \
-    $(GTEST_LAUNCHER_DIRS)))
-
-COV_ENVIRONMENT :=
-JTREG_COV_OPTIONS :=
-
-ifeq ($(TEST_OPTS_JCOV), true)
-  JCOV_OUTPUT_DIR := $(TEST_RESULTS_DIR)/jcov-output
-  JCOV_GRABBER_LOG := $(JCOV_OUTPUT_DIR)/grabber.log
-  JCOV_RESULT_FILE := $(JCOV_OUTPUT_DIR)/result.xml
-  JCOV_REPORT := $(JCOV_OUTPUT_DIR)/report
-  JCOV_MEM_OPTIONS := -Xms64m -Xmx4g
-
-  # Replace our normal test JDK with the JCov image.
-  JDK_UNDER_TEST := $(JCOV_IMAGE_DIR)
-
-  COV_ENVIRONMENT += JAVA_TOOL_OPTIONS="$(JCOV_MEM_OPTIONS)" \
-      _JAVA_OPTIONS="$(JCOV_MEM_OPTIONS)"
-  JTREG_COV_OPTIONS += -e:JAVA_TOOL_OPTIONS='$(JCOV_MEM_OPTIONS)' \
-      -e:_JAVA_OPTIONS='$(JCOV_MEM_OPTIONS)'
-endif
-
-ifeq ($(GCOV_ENABLED), true)
-  GCOV_OUTPUT_DIR := $(TEST_RESULTS_DIR)/gcov-output
-  COV_ENVIRONMENT += GCOV_PREFIX="$(GCOV_OUTPUT_DIR)"
-  JTREG_COV_OPTIONS += -e:GCOV_PREFIX="$(GCOV_OUTPUT_DIR)"
-endif
+GTEST_LAUNCHER_DIRS := $(patsubst %/gtestLauncher, %, $(wildcard $(TEST_IMAGE_DIR)/hotspot/gtest/*/gtestLauncher))
+GTEST_VARIANTS := $(strip $(patsubst $(TEST_IMAGE_DIR)/hotspot/gtest/%, %, $(GTEST_LAUNCHER_DIRS)))
 
 ################################################################################
 # Optionally create AOT libraries for specified modules before running tests.
 # Note, this could not be done during JDK build time.
 ################################################################################

@@ -157,40 +127,40 @@
 SetupAotModule = $(NamedParamsMacroTemplate)
 define SetupAotModuleBody
   $1_AOT_LIB := $$($1_BIN)/$$(call SHARED_LIBRARY,$$($1_MODULE))
   $1_AOT_CCLIST := $$(wildcard $$(TOPDIR)/test/hotspot/jtreg/compiler/aot/scripts/$$($1_MODULE)-list.txt)
 
-  # Create jaotc flags.
-  # VM flags which don't affect AOT code generation are filtered out:
-  # -Xcomp, -XX:+-TieredCompilation
+  ifeq ($(OPENJDK_TARGET_OS), windows)
+    $1_LD := $$(addsuffix $$(EXE_SUFFIX), $$(filter-out $$(FIXPATH), $$(LD)))
+  else
+    $1_LD := $$(LD)
+  endif
+
   $1_JAOTC_OPTS := \
       -J-Xmx4g --info \
-      $$(addprefix -J, $$(filter-out -Xcomp %TieredCompilation, $$($1_VM_OPTIONS))) \
+      $$(addprefix -J, $$($1_VM_OPTIONS)) \
       $$(addprefix --compile-commands$(SPACE), $$($1_AOT_CCLIST)) \
-      --linker-path $$(LD_JAOTC) \
+      --linker-path $$($1_LD) \
       #
 
   ifneq ($$(filter -ea, $$($1_VM_OPTIONS)), )
     $1_JAOTC_OPTS += --compile-with-assertions
   endif
 
-  $$($1_AOT_LIB): $$(JDK_UNDER_TEST)/release \
+  $$($1_AOT_LIB): $$(JDK_IMAGE_DIR)/release \
       $$(call DependOnVariable, $1_JAOTC_OPTS) \
-      $$(call DependOnVariable, JDK_UNDER_TEST)
+      $$(call DependOnVariable, JDK_IMAGE_DIR)
         $$(call LogWarn, Generating $$(patsubst $$(OUTPUTDIR)/%, %, $$@))
         $$(call MakeTargetDir)
         $$(call ExecuteWithLog, $$@, \
-            $$(COV_ENVIRONMENT) \
-            $$(FIXPATH) $$(JDK_UNDER_TEST)/bin/jaotc \
-                $$($1_JAOTC_OPTS) --output $$@ --module $$($1_MODULE) \
+            $$(FIXPATH) $$(JDK_IMAGE_DIR)/bin/jaotc \
+            $$($1_JAOTC_OPTS) --output $$@ --module $$($1_MODULE) \
         )
         $$(call ExecuteWithLog, $$@.check, \
-            $$(FIXPATH) $$(JDK_UNDER_TEST)/bin/java \
-                $$($1_VM_OPTIONS) -XX:+UnlockDiagnosticVMOptions \
-                -XX:+PrintAOT -XX:+UseAOTStrictLoading \
-                -XX:AOTLibrary=$$@ -version \
-                 > $$@.verify-aot \
+          $$(FIXPATH) $$(JDK_IMAGE_DIR)/bin/java \
+          $$($1_VM_OPTIONS) -XX:+PrintAOT -XX:+UseAOTStrictLoading -XX:AOTLibrary=$$@ -version \
+          > $$@.verify-aot \
         )
 
   $1_AOT_OPTIONS += -XX:AOTLibrary=$$($1_AOT_LIB)
   $1_AOT_TARGETS += $$($1_AOT_LIB)
 endef

@@ -232,11 +202,11 @@
 TEST_JOBS_FACTOR_JDL ?= 1
 TEST_JOBS_FACTOR_MACHINE ?= 1
 
 ifeq ($(TEST_JOBS), 0)
   CORES_DIVIDER := 2
-  ifeq ($(call isTargetCpuArch, sparc), true)
+  ifeq ($(OPENJDK_TARGET_CPU_ARCH), sparc)
     # For smaller SPARC machines we see reasonable scaling of throughput up to
     # cpus/4 without affecting test reliability. On the bigger machines, cpus/4
     # causes intermittent timeouts.
     ifeq ($(shell $(EXPR) $(NUM_CORES) \> 16), 1)
       CORES_DIVIDER := 5

@@ -275,14 +245,13 @@
 
 $(eval $(call SetTestOpt,JOBS,JTREG))
 $(eval $(call SetTestOpt,TIMEOUT_FACTOR,JTREG))
 
 $(eval $(call ParseKeywordVariable, JTREG, \
-    SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR TEST_MODE ASSERT VERBOSE RETAIN \
-        MAX_MEM, \
-    STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS KEYWORDS \
-        EXTRA_PROBLEM_LISTS AOT_MODULES, \
+    SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR TEST_MODE ASSERT VERBOSE RETAIN MAX_MEM \
+        EXTRA_PROBLEM_LISTS KEYWORDS AOT_MODULES, \
+    STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS, \
 ))
 
 ifneq ($(JTREG), )
   # Inform the user
   $(info Running tests using JTREG control variable '$(JTREG)')

@@ -293,12 +262,12 @@
 $(eval $(call SetTestOpt,VM_OPTIONS,GTEST))
 $(eval $(call SetTestOpt,JAVA_OPTIONS,GTEST))
 $(eval $(call SetTestOpt,AOT_MODULES,GTEST))
 
 $(eval $(call ParseKeywordVariable, GTEST, \
-    SINGLE_KEYWORDS := REPEAT, \
-    STRING_KEYWORDS := OPTIONS VM_OPTIONS JAVA_OPTIONS AOT_MODULES, \
+    SINGLE_KEYWORDS := REPEAT AOT_MODULES, \
+    STRING_KEYWORDS := OPTIONS VM_OPTIONS JAVA_OPTIONS, \
 ))
 
 ifneq ($(GTEST), )
   # Inform the user
   $(info Running tests using GTEST control variable '$(GTEST)')

@@ -309,12 +278,11 @@
 $(eval $(call SetTestOpt,VM_OPTIONS,MICRO))
 $(eval $(call SetTestOpt,JAVA_OPTIONS,MICRO))
 
 $(eval $(call ParseKeywordVariable, MICRO, \
     SINGLE_KEYWORDS := ITER FORK TIME WARMUP_ITER WARMUP_TIME, \
-    STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS RESULTS_FORMAT TEST_JDK \
-        BENCHMARKS_JAR, \
+    STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS RESULTS_FORMAT TEST_JDK BENCHMARKS_JAR, \
 ))
 
 ifneq ($(MICRO), )
   # Inform the user
   $(info Running tests using MICRO control variable '$(MICRO)')

@@ -376,12 +344,12 @@
   )
 endef
 
 # Helper function to determine if a test specification is a microbenchmark test
 #
-# It is a microbenchmark test if it is either "micro", or "micro:" followed by
-# an optional test filter string.
+# It is a microbenchmark test if it is either "micro", or "micro:" followed by an optional
+# test filter string.
 define ParseMicroTestSelection
   $(if $(filter micro%, $1), \
     $(if $(filter micro, $1), \
       micro:all \
     , \

@@ -467,12 +435,12 @@
 endef
 
 # Helper function to determine if a test specification is a special test
 #
 # It is a special test if it is "special:" followed by a test name,
-# if it is "make:" or "make-" followed by a make test, or any of the special
-# test names as a single word.
+# if it is "make:" or "make-" followed by a make test, or any of the special test names
+# as a single word.
 define ParseSpecialTestSelection
   $(if $(filter special:%, $1), \
     $1 \
   ) \
   $(if $(filter make%, $1), \

@@ -583,17 +551,17 @@
         MODULES := $$(GTEST_AOT_MODULES), \
         VM_OPTIONS := $$(GTEST_VM_OPTIONS) $$(GTEST_JAVA_OPTIONS), \
     ))
   endif
 
-  run-test-$1: pre-run-test $$($1_AOT_TARGETS)
+  run-test-$1: $$($1_AOT_TARGETS)
         $$(call LogWarn)
         $$(call LogWarn, Running test '$$($1_TEST)')
         $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
         $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/gtest, \
             $$(FIXPATH) $$(TEST_IMAGE_DIR)/hotspot/gtest/$$($1_VARIANT)/gtestLauncher \
-                -jdk $(JDK_UNDER_TEST) $$($1_GTEST_FILTER) \
+                -jdk $(JDK_IMAGE_DIR) $$($1_GTEST_FILTER) \
                 --gtest_output=xml:$$($1_TEST_RESULTS_DIR)/gtest.xml \
                 $$($1_GTEST_REPEAT) $$(GTEST_OPTIONS) $$(GTEST_VM_OPTIONS) \
                 $$(GTEST_JAVA_OPTIONS) $$($1_AOT_OPTIONS) \
                 > >($(TEE) $$($1_TEST_RESULTS_DIR)/gtest.txt) \
             && $$(ECHO) $$$$? > $$($1_EXITCODE) \

@@ -625,13 +593,11 @@
           $$(eval $1_TOTAL := 1) \
         )
 
   $1: run-test-$1 parse-test-$1
 
-  TARGETS += $1 run-test-$1 parse-test-$1
-  TEST_TARGETS += parse-test-$1
-
+  TARGETS += $1
 endef
 
 ################################################################################
 
 ### Rules for Microbenchmarks

@@ -661,22 +627,19 @@
   $1_EXITCODE := $$($1_TEST_RESULTS_DIR)/exitcode.txt
 
   $1_TEST_NAME := $$(strip $$(patsubst micro:%, %, $$($1_TEST)))
 
   $$(eval $$(call SetMicroValue,$1,MICRO_BENCHMARKS_JAR,$$(TEST_IMAGE_DIR)/micro/benchmarks.jar))
-  $$(eval $$(call SetMicroValue,$1,MICRO_TEST_JDK,$$(JDK_UNDER_TEST)))
+  $$(eval $$(call SetMicroValue,$1,MICRO_TEST_JDK,$$(JDK_IMAGE_DIR)))
   $$(eval $$(call SetMicroValue,$1,MICRO_JAVA_OPTIONS))
 
   # Current tests needs to open java.io
   $1_MICRO_JAVA_OPTIONS += --add-opens=java.base/java.io=ALL-UNNAMED
-  # Set library path for native dependencies
-  $1_MICRO_JAVA_OPTIONS += -Djava.library.path=$$(TEST_IMAGE_DIR)/micro/native
 
   # Save output as JSON or CSV file
   ifneq ($$(MICRO_RESULTS_FORMAT), )
-    $1_MICRO_BASIC_OPTIONS += -rf $$(MICRO_RESULTS_FORMAT)
-    $1_MICRO_BASIC_OPTIONS += -rff $$($1_TEST_RESULTS_DIR)/jmh-result.$(MICRO_RESULTS_FORMAT)
+    $1_MICRO_BASIC_OPTIONS += -rf $$(MICRO_RESULTS_FORMAT) -rff $$($1_TEST_RESULTS_DIR)/jmh-result.$(MICRO_RESULTS_FORMAT)
   endif
 
   ifneq ($$(MICRO_VM_OPTIONS)$$(MICRO_JAVA_OPTIONS), )
     $1_MICRO_VM_OPTIONS := -jvmArgs $$(MICRO_VM_OPTIONS) $$(MICRO_JAVA_OPTIONS)
   endif

@@ -695,20 +658,19 @@
   endif
   ifneq ($$(MICRO_WARMUP_TIME), )
     $1_MICRO_WARMUP_TIME := -w $$(MICRO_WARMUP_TIME)
   endif
 
-  run-test-$1: pre-run-test
+  run-test-$1:
         $$(call LogWarn)
         $$(call LogWarn, Running test '$$($1_TEST)')
         $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
         $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/micro, \
-            $$(FIXPATH) $$($1_MICRO_TEST_JDK)/bin/java $$($1_MICRO_JAVA_OPTIONS) \
-                -jar $$($1_MICRO_BENCHMARKS_JAR) \
+            $$($1_MICRO_TEST_JDK)/bin/java $$($1_MICRO_JAVA_OPTIONS) -jar $$($1_MICRO_BENCHMARKS_JAR) \
                 $$($1_MICRO_ITER) $$($1_MICRO_FORK) $$($1_MICRO_TIME) \
                 $$($1_MICRO_WARMUP_ITER) $$($1_MICRO_WARMUP_TIME) \
-                $$($1_MICRO_VM_OPTIONS) $$($1_MICRO_BASIC_OPTIONS) $$(MICRO_OPTIONS) \
+                $$($1_MICRO_VM_OPTIONS) $$($1_MICRO_BASIC_OPTIONS) $$(MICRO_OPTIONS)  \
                 $$($1_TEST_NAME) \
                 > >($(TEE) $$($1_TEST_RESULTS_DIR)/micro.txt) \
             && $$(ECHO) $$$$? > $$($1_EXITCODE) \
             || $$(ECHO) $$$$? > $$($1_EXITCODE) \
         )

@@ -738,13 +700,11 @@
           $$(eval $1_TOTAL := 1) \
         )
 
   $1: run-test-$1 parse-test-$1
 
-  TARGETS += $1 run-test-$1 parse-test-$1
-  TEST_TARGETS += parse-test-$1
-
+  TARGETS += $1
 endef
 
 ################################################################################
 
 ### Rules for Jtreg

@@ -819,11 +779,11 @@
   # Make sure MaxRAMPercentage is high enough to not cause OOM or swapping since
   # we may end up with a lot of JVM's
   $1_JTREG_MAX_RAM_PERCENTAGE := $$(shell $$(EXPR) 25 / $$($1_JTREG_JOBS))
 
   # SPARC is in general slower per core so need to scale up timeouts a bit.
-  ifeq ($(call isTargetCpuArch, sparc), true)
+  ifeq ($(OPENJDK_TARGET_CPU_ARCH), sparc)
     JTREG_TIMEOUT_FACTOR ?= 8
   else
     JTREG_TIMEOUT_FACTOR ?= 4
   endif
   JTREG_VERBOSE ?= fail,error,summary

@@ -846,11 +806,11 @@
   $1_JTREG_BASIC_OPTIONS += -e:JIB_DATA_DIR
   # Some tests needs to find a boot JDK using the JDK8_HOME variable.
   $1_JTREG_BASIC_OPTIONS += -e:JDK8_HOME=$$(BOOT_JDK)
   # If running on Windows, propagate the _NT_SYMBOL_PATH to enable
   # symbol lookup in hserr files
-  ifeq ($$(call isTargetOs, windows), true)
+  ifeq ($$(OPENJDK_TARGET_OS), windows)
     $1_JTREG_BASIC_OPTIONS += -e:_NT_SYMBOL_PATH
   endif
 
   $1_JTREG_BASIC_OPTIONS += \
       $$(addprefix -javaoption:, $$(JTREG_JAVA_OPTIONS)) \

@@ -879,12 +839,11 @@
 
   ifneq ($$(JIB_HOME), )
     $1_JTREG_BASIC_OPTIONS += -e:JIB_HOME=$$(JIB_HOME)
   endif
 
-  $1_JTREG_BASIC_OPTIONS += -e:TEST_IMAGE_DIR=$(TEST_IMAGE_DIR)
-  $1_JTREG_BASIC_OPTIONS += -e:TEST_IMAGE_GRAAL_DIR=$(TEST_IMAGE_DIR)/hotspot/jtreg/graal
+  $1_JTREG_BASIC_OPTIONS += -e:TEST_IMAGE_GRAAL_DIR=${TEST_IMAGE_DIR}/hotspot/jtreg/graal
 
   ifneq ($$(JTREG_FAILURE_HANDLER_OPTIONS), )
     $1_JTREG_LAUNCHER_OPTIONS += -Djava.library.path="$(JTREG_FAILURE_HANDLER_DIR)"
   endif
 

@@ -911,26 +870,24 @@
   endif
 
   clean-workdir-$1:
         $$(RM) -r $$($1_TEST_SUPPORT_DIR)
 
-  run-test-$1: pre-run-test clean-workdir-$1 $$($1_AOT_TARGETS)
+  run-test-$1: clean-workdir-$1 $$($1_AOT_TARGETS)
         $$(call LogWarn)
         $$(call LogWarn, Running test '$$($1_TEST)')
         $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
         $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/jtreg, \
-            $$(COV_ENVIRONMENT) \
             $$(JAVA) $$($1_JTREG_LAUNCHER_OPTIONS) \
                 -Dprogram=jtreg -jar $$(JT_HOME)/lib/jtreg.jar \
                 $$($1_JTREG_BASIC_OPTIONS) \
-                -testjdk:$$(JDK_UNDER_TEST) \
+                -testjdk:$$(JDK_IMAGE_DIR) \
                 -dir:$$(JTREG_TOPDIR) \
                 -reportDir:$$($1_TEST_RESULTS_DIR) \
                 -workDir:$$($1_TEST_SUPPORT_DIR) \
                 $$(JTREG_OPTIONS) \
                 $$(JTREG_FAILURE_HANDLER_OPTIONS) \
-                $$(JTREG_COV_OPTIONS) \
                 $$($1_TEST_NAME) \
             && $$(ECHO) $$$$? > $$($1_EXITCODE) \
             || $$(ECHO) $$$$? > $$($1_EXITCODE) \
         )
 

@@ -960,15 +917,13 @@
           $$(eval $1_FAILED := 0) \
           $$(eval $1_ERROR := 1) \
           $$(eval $1_TOTAL := 1) \
         )
 
-  $1: run-test-$1 parse-test-$1 clean-workdir-$1
-
-  TARGETS += $1 run-test-$1 parse-test-$1 clean-workdir-$1
-  TEST_TARGETS += parse-test-$1
+  $1: run-test-$1 parse-test-$1
 
+  TARGETS += $1
 endef
 
 ################################################################################
 
 ### Rules for special tests

@@ -1002,11 +957,11 @@
         TestMake.gmk $$($1_TEST_ARGS))
   else
     $$(error Invalid special test specification: $$($1_TEST_NAME))
   endif
 
-  run-test-$1: pre-run-test
+  run-test-$1: $(TEST_PREREQS)
         $$(call LogWarn)
         $$(call LogWarn, Running test '$$($1_TEST)')
         $$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
         $$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/test-execution, \
             $$($1_TEST_COMMAND_LINE) \

@@ -1028,13 +983,11 @@
         $$(eval $1_ERROR := 0)
         $$(eval $1_TOTAL := 0)
 
   $1: run-test-$1 parse-test-$1
 
-  TARGETS += $1 run-test-$1 parse-test-$1
-  TEST_TARGETS += parse-test-$1
-
+  TARGETS += $1
 endef
 
 ################################################################################
 # Setup and execute make rules for all selected tests
 ################################################################################

@@ -1092,40 +1045,16 @@
 
 ################################################################################
 # The main target for RunTests.gmk
 ################################################################################
 
-#
-# Provide hooks for adding functionality before and after all tests are run.
-#
-
-$(call LogInfo, RunTest setup starting)
+# The SetupRun*Test functions have populated TARGETS.
 
-# This target depends on all actual tests having been run (TEST_TARGETS has been
-# populated by the SetupRun*Test functions). If you need to provide a teardown
-# hook, you must let it depend on this target.
-run-all-tests: $(TEST_TARGETS)
-        $(call LogInfo, RunTest teardown starting)
-
-# This is an abstract target that will be run before any actual tests. Add your
-# target as a dependency to this if you need "setup" type functionality executed
-# before all tests.
-pre-run-test:
-        $(call LogInfo, RunTest setup done)
-
-# This is an abstract target that will be run after all actual tests, but before
-# the test summary. If you need "teardown" type functionality, add your target
-# as a dependency on this, and let the teardown target depend on run-all-tests.
-post-run-test: run-all-tests
-        $(call LogInfo, RunTest teardown done)
-
-#
-# Create and print a table of the result of all tests run
-#
 TEST_FAILURE := false
 
-run-test-report: post-run-test
+run-test: $(TARGETS)
+        # Create and print a table of the result of all tests run
         $(RM) $(TEST_SUMMARY).old 2> /dev/null
         $(MV) $(TEST_SUMMARY) $(TEST_SUMMARY).old 2> /dev/null || true
         $(RM) $(TEST_LAST_IDS).old 2> /dev/null
         $(MV) $(TEST_LAST_IDS) $(TEST_LAST_IDS).old 2> /dev/null || true
         $(ECHO) >> $(TEST_SUMMARY) ==============================

@@ -1165,64 +1094,10 @@
         )
         $(ECHO)
         $(CAT) $(TEST_SUMMARY)
         $(ECHO)
 
-# The main run-test target
-run-test: run-test-report
-
-TARGETS += run-all-tests pre-run-test post-run-test run-test-report run-test
-
-################################################################################
-# Setup JCov
-################################################################################
-
-ifeq ($(TEST_OPTS_JCOV), true)
-
-  jcov-do-start-grabber:
-        $(call MakeDir, $(JCOV_OUTPUT_DIR))
-        if $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -status 1>/dev/null 2>&1 ; then \
-          $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -stop -stoptimeout 3600 ; \
-        fi
-        $(JAVA) -Xmx4g -jar $(JCOV_HOME)/lib/jcov.jar Grabber -v -t \
-            $(JCOV_IMAGE_DIR)/template.xml -o $(JCOV_RESULT_FILE) \
-            1>$(JCOV_GRABBER_LOG) 2>&1 &
-
-  jcov-start-grabber: jcov-do-start-grabber
-        $(call LogWarn, Starting JCov Grabber...)
-        $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -t 600 -wait
-
-  jcov-stop-grabber:
-        $(call LogWarn, Stopping JCov Grabber...)
-        $(JAVA) -jar $(JCOV_HOME)/lib/jcov.jar GrabberManager -stop -stoptimeout 3600
-
-  JCOV_REPORT_TITLE := JDK code coverage report<br/>
-  ifneq ($(JCOV_FILTERS), )
-    JCOV_REPORT_TITLE += Code filters: $(JCOV_FILTERS)<br>
-  endif
-  JCOV_REPORT_TITLE += Tests: $(TEST)
-
-  jcov-gen-report: jcov-stop-grabber
-        $(call LogWarn, Generating JCov report ...)
-        $(JAVA) -Xmx4g -jar $(JCOV_HOME)/lib/jcov.jar RepGen -sourcepath \
-            `$(ECHO) $(TOPDIR)/src/*/share/classes/ | $(TR) ' ' ':'` -fmt html \
-            $(JCOV_FILTERS) \
-            -mainReportTitle "$(JCOV_REPORT_TITLE)" \
-            -o $(JCOV_REPORT) $(JCOV_RESULT_FILE)
-
-  TARGETS += jcov-do-start-grabber jcov-start-grabber jcov-stop-grabber \
-      jcov-gen-report
-
-  # Hook this into the framework at appropriate places
-  pre-run-test: jcov-start-grabber
-
-  post-run-test: jcov-gen-report
-
-  jcov-gen-report: run-all-tests
-
-endif
-
 ################################################################################
 
 all: run-test
 
-.PHONY: default all $(TARGETS)
+.PHONY: default all run-test $(TARGETS)
< prev index next >