Test reporting should consistently count and report test failures, not compiler errors #4383
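The change below drops the shared compiler-error counter from the test harness and reports a dedicated test-failure count instead, so that timeouts, runtime crashes, and compilation problems are all surfaced uniformly as failed tests. As a minimal sketch of the bookkeeping the diff introduces (FailureTracking is an illustrative name and the structure is simplified; it is not the exact ParallelTesting implementation):

sealed trait Failure
case class TimeoutFailure(title: String) extends Failure
case object Generic extends Failure

trait FailureTracking {
  // Failures get their own counter, independent of compiler error counts,
  // so a test can fail for non-compilation reasons and still be counted.
  private var _failures = Set.empty[Failure]
  private var _failureCount = 0

  // Record one failure; counting calls rather than _failures.size means
  // repeated Generic failures are not collapsed into a single entry.
  protected final def fail(failure: Failure = Generic): Unit = synchronized {
    _failures += failure
    _failureCount += 1
  }

  def didFail: Boolean = _failureCount != 0
  def failureReasons: Set[Failure] = _failures
  def failureCount: Int = _failureCount
}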

Merged
35 changes: 16 additions & 19 deletions compiler/test/dotty/tools/vulpix/ParallelTesting.scala
@@ -226,16 +226,12 @@ trait ParallelTesting extends RunnerOrchestration { self =>
/** Total amount of test sources being compiled by this test */
val sourceCount = filteredSources.length

private[this] var _errorCount = 0
def errorCount: Int = _errorCount

private[this] var _testSourcesCompleted = 0
private def testSourcesCompleted: Int = _testSourcesCompleted

/** Complete the current compilation with the amount of errors encountered */
protected final def registerCompletion(errors: Int) = synchronized {
protected final def registerCompletion() = synchronized {
_testSourcesCompleted += 1
_errorCount += errors
}

sealed trait Failure
@@ -244,17 +240,20 @@ trait ParallelTesting extends RunnerOrchestration { self =>
case object Generic extends Failure

private[this] var _failures = Set.empty[Failure]
private[this] var _failureCount = 0

/** Fail the current test */
protected[this] final def fail(failure: Failure = Generic): Unit = synchronized {
_failures = _failures + failure
_failureCount = _failureCount + 1
}
def didFail: Boolean = _failures.nonEmpty
def didFail: Boolean = _failureCount != 0

/** A set of the different failures */
def failureReasons: Set[Failure] = _failures

/** Number of failed tests */
def failureCount: Int = _failures.size
def failureCount: Int = _failureCount

protected def logBuildInstructions(reporter: TestReporter, testSource: TestSource, err: Int, war: Int) = {
val errorMsg = testSource.buildInstructions(reporter.errorCount, reporter.warningCount)
@@ -327,7 +326,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
// run should fail
failTestSource(testSource)
e.printStackTrace()
registerCompletion(1)
registerCompletion()
throw e
}
}
@@ -555,7 +554,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
}
else if (fromTasty) compileFromTasty(flags, false, outDir)
else compile(testSource.sourceFiles, flags, false, outDir)
registerCompletion(reporter.errorCount)
registerCompletion()

if (reporter.compilerCrashed || reporter.errorCount > 0) {
logReporterContents(reporter)
@@ -574,7 +573,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>

def warningCount = reporters.foldLeft(0)(_ + _.warningCount)

registerCompletion(errorCount)
registerCompletion()

if (compilerCrashed || errorCount > 0) {
reporters.foreach(logReporterContents)
@@ -695,7 +694,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
addFailureInstruction(buildInstr)
failTestSource(testSource)
}
registerCompletion(errorCount)
registerCompletion()
}
}
}
@@ -748,9 +747,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
true
}
else {
echo {
s"Error reported in ${error.pos.source}, but no annotation found"
}
echo(s"Error reported in ${error.pos.source}, but no annotation found")
false
}
}
@@ -798,7 +795,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
else if (!errorMap.isEmpty)
fail(s"\nExpected error(s) have {<error position>=<unreported error>}: $errorMap")

registerCompletion(actualErrors)
registerCompletion()
}
}
}
@@ -1012,11 +1009,11 @@ trait ParallelTesting extends RunnerOrchestration { self =>

/** Extract `Failure` set and render from `Test` */
private[this] def reasonsForFailure(test: Test): String = {
val errors =
if (test.errorCount == 0) ""
else s"\n - encountered ${test.errorCount} error(s)"
val failureReport =
if (test.failureCount == 0) ""
else s"\n - encountered ${test.failureCount} test failures(s)"

errors + test.failureReasons.collect {
failureReport + test.failureReasons.collect {
case test.TimeoutFailure(title) =>
s" - test '$title' timed out"
case test.JavaCompilationFailure(msg) =>
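With the counter in place, the summary rendered for a failed test is built from failureCount and the collected reasons rather than from compiler error totals. A rough sketch of that rendering step, following reasonsForFailure above (renderFailureReport is a hypothetical standalone helper, not the real method signature):

def renderFailureReport(failureCount: Int, reasons: Seq[String]): String = {
  // Header mirrors the wording introduced in the diff above.
  val header =
    if (failureCount == 0) ""
    else s"\n - encountered $failureCount test failure(s)"
  // Each rendered reason is appended on its own line after the header.
  header + reasons.map(r => s"\n - $r").mkString
}

For example, renderFailureReport(1, Seq("test 'tests/vulpix-tests/unit/timeout.scala' timed out")) produces a header line plus one reason line, which is the shape of the messages asserted in VulpixUnitTests and expected in sbt-output.check below.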
2 changes: 1 addition & 1 deletion compiler/test/dotty/tools/vulpix/SummaryReport.scala
@@ -98,7 +98,7 @@ final class SummaryReport extends SummaryReporting {
|Test Report
|================================================================================
|
|$passed passed, $failed failed, ${passed + failed} total
|$passed suites passed, $failed failed, ${passed + failed} total
|""".stripMargin
)

3 changes: 2 additions & 1 deletion compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala
@@ -91,7 +91,8 @@ class VulpixUnitTests extends ParallelTesting {
fail()
} catch {
case ae: AssertionError =>
assertEquals(ae.getMessage, "Run test failed, but should not, reasons:\n - test 'tests/vulpix-tests/unit/timeout.scala' timed out")
assertEquals("Run test failed, but should not, reasons:\n\n - encountered 1 test failures(s) - test 'tests/vulpix-tests/unit/timeout.scala' timed out",
ae.getMessage)
}
}
}
7 changes: 4 additions & 3 deletions tests/vulpix-tests/meta/sbt-output.check
@@ -8,7 +8,8 @@ Diff (expected on the left, actual right):
EOF | EOF

[error] Test dotty.tools.vulpix.VulpixMetaTests.runAll failed: java.lang.AssertionError: Run test failed, but should not, reasons:
[error] , took 1.682 sec SKIP
[error]
[error] - encountered 1 test failure(s), took 3.697 sec SKIP
[error] at dotty.tools.vulpix.ParallelTesting$CompilationTest.checkRuns(ParallelTesting.scala:993) SKIP
[error] at dotty.tools.vulpix.VulpixMetaTests.runAll(VulpixMetaTests.scala:26) SKIP
[error] ...
@@ -18,7 +19,7 @@ Testing tests/vulpix-tests/meta/neg/missing-error-annotation.scala
Wrong number of errors encountered when compiling out/VulpixMetaTests/neg/missing-error-annotation, expected: 0, actual: 2
[error] Test dotty.tools.vulpix.VulpixMetaTests.compileNeg failed: java.lang.AssertionError: Neg test shouldn't have failed, but did. Reasons:
[error]
[error] - encountered 2 error(s), took 0.093 sec SKIP
[error] - encountered 1 test failure(s), took 0.156 sec SKIP
[error] at dotty.tools.vulpix.ParallelTesting$CompilationTest.checkExpectedErrors(ParallelTesting.scala:975) SKIP
[error] at dotty.tools.vulpix.VulpixMetaTests.compileNeg(VulpixMetaTests.scala:25) SKIP
[error] ...
@@ -30,7 +31,7 @@ Testing tests/vulpix-tests/meta/pos/does-not-compile.scala
| not found: a
[error] Test dotty.tools.vulpix.VulpixMetaTests.compilePos failed: java.lang.AssertionError: Expected no errors when compiling, failed for the following reason(s):
[error]
[error] - encountered 1 error(s), took 0.069 sec SKIP
[error] - encountered 1 test failure(s), took 0.069 sec SKIP
[error] at dotty.tools.vulpix.ParallelTesting$CompilationTest.checkCompile(ParallelTesting.scala:958) SKIP
[error] at dotty.tools.vulpix.VulpixMetaTests.compilePos(VulpixMetaTests.scala:24) SKIP
[error] ...