Issue #2944: Changed the logging levels #3491

Open
wants to merge 7 commits into base: master
3 changes: 3 additions & 0 deletions .idea/vcs.xml


@@ -38,6 +38,7 @@ class CliIntegrationTest : AbstractCliIntegrationTest() {
"-outputDir", dokkaOutputDir.path,
"-pluginsClasspath", basePluginJarFile.path,
"-moduleName", "Basic Project",
"-loggingLevel", "DEBUG",
"-sourceSet",
buildString {
append(" -sourceSetName cliMain")
@@ -279,7 +280,7 @@ class CliIntegrationTest : AbstractCliIntegrationTest() {
}

val process = ProcessBuilder(
"java", "-jar", cliJarFile.path, jsonPath
"java", "-jar", cliJarFile.path, jsonPath, "-loggingLevel", "DEBUG"
).redirectErrorStream(true).start()

val result = process.awaitProcessResult()
@@ -351,7 +352,7 @@ class CliIntegrationTest : AbstractCliIntegrationTest() {
}

val process = ProcessBuilder(
"java", "-jar", cliJarFile.path, jsonPath
"java", "-jar", cliJarFile.path, jsonPath, "-loggingLevel", "DEBUG"
).redirectErrorStream(true).start()

val result = process.awaitProcessResult()
@@ -31,7 +31,7 @@ class BiojavaIntegrationTest : AbstractIntegrationTest(), TestOutputCopier {
@Test
fun `dokka javadoc`() {
val result = ProcessBuilder().directory(projectDir)
.command(mavenBinaryFile.absolutePath, "dokka:javadoc", "-pl", "biojava-core", "\"-Ddokka_version=$currentDokkaVersion\"", "-U", "-e").start().awaitProcessResult()
.command(mavenBinaryFile.absolutePath, "dokka:javadoc", "-pl", "biojava-core", "\"-Ddokka_version=$currentDokkaVersion\"", "-X", "-U", "-e").start().awaitProcessResult()

diagnosticAsserts(result)

@@ -57,7 +57,7 @@ class MavenIntegrationTest : AbstractIntegrationTest() {
@Test
fun `dokka dokka`() {
val result = ProcessBuilder().directory(projectDir)
.command(mavenBinaryFile.absolutePath, "dokka:dokka", "-U", "-e").start().awaitProcessResult()
.command(mavenBinaryFile.absolutePath, "dokka:dokka", "-X", "-U", "-e").start().awaitProcessResult()

diagnosticAsserts(result)

@@ -106,7 +106,7 @@ class MavenIntegrationTest : AbstractIntegrationTest() {
@Test
fun `dokka javadoc`() {
val result = ProcessBuilder().directory(projectDir)
.command(mavenBinaryFile.absolutePath, "dokka:javadoc", "-U", "-e").start().awaitProcessResult()
.command(mavenBinaryFile.absolutePath, "dokka:javadoc", "-X", "-U", "-e").start().awaitProcessResult()

diagnosticAsserts(result)

@@ -128,7 +128,7 @@ class MavenIntegrationTest : AbstractIntegrationTest() {
@Test
fun `dokka javadocJar`() {
val result = ProcessBuilder().directory(projectDir)
.command(mavenBinaryFile.absolutePath, "dokka:javadocJar", "-U", "-e").start().awaitProcessResult()
.command(mavenBinaryFile.absolutePath, "dokka:javadocJar", "-X", "-U", "-e").start().awaitProcessResult()

diagnosticAsserts(result)

@@ -47,7 +47,7 @@ public class DocCommentFinder(
return superMethodDocumentation.single()
}

logger.debug(
logger.warn(
"Conflicting documentation for ${DRI.from(method)}" +
"${superMethods.map { DRI.from(it) }}"
)
@@ -87,7 +87,7 @@ internal class DokkaMessageCollector(private val logger: DokkaLogger) : MessageC
if (severity == CompilerMessageSeverity.ERROR) {
seenErrors = true
}
logger.info(MessageRenderer.PLAIN_FULL_PATHS.render(severity, message, location))
logger.debug(MessageRenderer.PLAIN_FULL_PATHS.render(severity, message, location))
}

override fun hasErrors() = seenErrors
@@ -73,7 +73,7 @@ internal class DescriptorSampleAnalysisEnvironment(

val samplePsiElement = resolveSession.resolveSamplePsiElement(sourceSet, fullyQualifiedLink)
if (samplePsiElement == null) {
dokkaLogger.debug("Cannot resolve sample element for: \"$fullyQualifiedLink\"")
dokkaLogger.warn("Cannot resolve sample element for: \"$fullyQualifiedLink\"")
return null
} else if (samplePsiElement.containingFile !is KtFile) {
dokkaLogger.warn("Unable to resolve non-Kotlin @sample links: \"$fullyQualifiedLink\"")
@@ -92,7 +92,7 @@ internal class DescriptorSampleAnalysisEnvironment(
): PsiElement? {
val packageDescriptor = resolveNearestPackageDescriptor(fqLink)
if (packageDescriptor == null) {
dokkaLogger.debug(
dokkaLogger.warn(
"Unable to resolve package descriptor for @sample: \"$fqLink\";"
)
return null
@@ -154,7 +154,7 @@ internal class DescriptorSampleAnalysisEnvironment(
if (packageDescriptor != null) {
return packageDescriptor
}
dokkaLogger.debug("Failed to resolve package \"$supposedPackageName\" for sample \"$fqLink\"")
dokkaLogger.warn("Failed to resolve package \"$supposedPackageName\" for sample \"$fqLink\"")

if (isRootPackage) {
// cannot go any deeper
@@ -164,7 +164,7 @@ internal fun topologicalSortByDependantSourceSets(
souceSet.dependentSourceSets.mapNotNull { dependentSourceSetId ->
sourceSets.find { it.sourceSetID == dependentSourceSetId }
// just skip
?: null.also { logger.error("Unknown source set Id $dependentSourceSetId in dependencies of ${souceSet.sourceSetID}") }
?: null.also { logger.warn("Unknown source set Id $dependentSourceSetId in dependencies of ${souceSet.sourceSetID}") }
Contributor:
I will mention it here, but it affects all error->warn changes.
Previously we emitted a lot of errors, but now we report only 3 of them and a lot of warnings. So if a project does not set failOnWarning=true, there could be many warnings, which could affect the quality of the generated HTML.
I would expect that users who generate documentation will not check that the produced HTML is still fine after a new declaration (or anything else) is added or removed. Those warnings could also be easily missed when updating the project or Dokka, especially if the project documentation is built on CI.
I'm just trying to understand how this will affect our users. Maybe we need to set failOnWarning=true by default in this case, to prevent users from generating bad HTML?

Or maybe we should better specify what successful documentation generation means. From the DokkaLogger.error documentation:

This level is for logging error messages that describe what prevented Dokka from proceeding with successful documentation generation

What do we mean by successful documentation generation? Is HTML with absent data/declarations considered successful?
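
For reference, a project can already opt into stricter behavior itself; below is a minimal sketch (not part of this PR), assuming the Dokka Gradle plugin 1.x DSL and its existing failOnWarning task property, in build.gradle.kts:

tasks.withType<org.jetbrains.dokka.gradle.DokkaTask>().configureEach {
    // Fail the build when Dokka reports warnings, so downgraded errors are not silently missed on CI.
    failOnWarning.set(true)
}

Whether something like this should become the default is exactly the open question raised above.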

}
verticesAssociatedWithState[souceSet] = State.VISITING
dependentSourceSets.forEach(::dfs)
2 changes: 1 addition & 1 deletion dokka-subprojects/core/api/dokka-core.api
@@ -446,7 +446,7 @@ public final class org/jetbrains/dokka/SourceLinkDefinitionImpl$Companion {
public final class org/jetbrains/dokka/Timer {
public final fun dump (Ljava/lang/String;)V
public static synthetic fun dump$default (Lorg/jetbrains/dokka/Timer;Ljava/lang/String;ILjava/lang/Object;)V
public final fun report (Ljava/lang/String;)V
public final fun report (Ljava/lang/String;Lorg/jetbrains/dokka/utilities/LoggingLevel;)V
}

public abstract interface class org/jetbrains/dokka/generation/Generation {
@@ -12,6 +12,7 @@ import org.jetbrains.dokka.generation.GracefulGenerationExit
import org.jetbrains.dokka.plugability.DokkaContext
import org.jetbrains.dokka.plugability.DokkaPlugin
import org.jetbrains.dokka.utilities.DokkaLogger
import org.jetbrains.dokka.utilities.LoggingLevel

/**
* DokkaGenerator is the main entry point for generating documentation
@@ -25,12 +26,12 @@ public class DokkaGenerator(

public fun generate() {
timed(logger) {
report("Initializing plugins")
report("Initializing plugins", LoggingLevel.DEBUG)
val context = initializePlugins(configuration, logger)

runCatching {
context.single(CoreExtensions.generation).run {
logger.progress("Dokka is performing: $generationName")
logger.debug("Dokka is performing: $generationName")
generate()
}
}.exceptionOrNull()?.let { e ->
@@ -59,18 +60,29 @@
public class Timer internal constructor(startTime: Long, private val logger: DokkaLogger?) {
private val steps = mutableListOf("" to startTime)

public fun report(name: String) {
logger?.progress(name)
// public fun report(name: String) {
// logger?.progress(name)
// steps += (name to System.currentTimeMillis())
// }

public fun report(name: String, logLevel: LoggingLevel) {
when(logLevel) {
LoggingLevel.DEBUG -> logger?.debug(name)
LoggingLevel.PROGRESS -> logger?.progress(name)
LoggingLevel.INFO -> logger?.info(name)
LoggingLevel.WARN -> logger?.warn(name)
LoggingLevel.ERROR -> logger?.error(name)
}
steps += (name to System.currentTimeMillis())
}

public fun dump(prefix: String = "") {
logger?.info(prefix)
logger?.debug(prefix)
val namePad = steps.map { it.first.length }.maxOrNull() ?: 0
val timePad = steps.windowed(2).map { (p1, p2) -> p2.second - p1.second }.maxOrNull()?.toString()?.length ?: 0
steps.windowed(2).forEach { (p1, p2) ->
if (p1.first.isNotBlank()) {
logger?.info("${p1.first.padStart(namePad)}: ${(p2.second - p1.second).toString().padStart(timePad)}")
logger?.debug("${p1.first.padStart(namePad)}: ${(p2.second - p1.second).toString().padStart(timePad)}")
}
}
}
@@ -81,9 +93,9 @@ private fun timed(logger: DokkaLogger? = null, block: Timer.() -> Unit): Timer =
try {
block()
} catch (exit: GracefulGenerationExit) {
report("Exiting Generation: ${exit.reason}")
report("Exiting Generation: ${exit.reason}", LoggingLevel.PROGRESS)
Contributor:
Maybe here we can just do logger.progress(exit.reason) and leave the report logic fully at the debug level, so there is no need for a report with a custom logging level?
For example:

try {
    block()
    report("Exiting Generation")
} catch (exit: GracefulGenerationExit) {
    logger.progress(exit.reason)
    report("Exiting Generation: ${exit.reason}")
} catch (cause: Throwable) {
    report("Exiting Generation: ${cause.message}")
    throw cause
}

AFAIU, report is mostly for debugging purposes.

} finally {
report("")
report("", LoggingLevel.PROGRESS)
}
}

@@ -217,9 +217,9 @@ private class DokkaContextConfigurationImpl(
"\t${it.key} by " + (it.value.singleOrNull() ?: it.value)
}

logger.info("Loaded plugins: $pluginNames")
logger.info("Loaded: $loadedListForDebug")
logger.info("Suppressed: $suppressedList")
logger.debug("Loaded plugins: $pluginNames")
logger.debug("Loaded: $loadedListForDebug")
logger.debug("Suppressed: $suppressedList")
}
}

@@ -108,14 +108,14 @@ public interface DokkaLogger {

public fun DokkaLogger.report() {
if (warningsCount > 0 || errorsCount > 0) {
info(
progress(
"Generation completed with $warningsCount warning" +
(if (warningsCount == 1) "" else "s") +
" and $errorsCount error" +
if (errorsCount == 1) "" else "s"
)
} else {
info("Generation completed successfully")
progress("Generation completed successfully")
}
}

@@ -15,32 +15,33 @@ import org.jetbrains.dokka.plugability.querySingle
import org.jetbrains.dokka.templates.TemplatingPlugin
import org.jetbrains.dokka.templates.TemplatingResult
import org.jetbrains.dokka.transformers.pages.CreationContext
import org.jetbrains.dokka.utilities.LoggingLevel

public class AllModulesPageGeneration(private val context: DokkaContext) : Generation {

private val allModulesPagePlugin by lazy { context.plugin<AllModulesPagePlugin>() }
private val templatingPlugin by lazy { context.plugin<TemplatingPlugin>() }

override fun Timer.generate() {
report("Processing submodules")
report("Processing submodules", LoggingLevel.DEBUG)
val generationContext = processSubmodules()

report("Creating all modules page")
report("Creating all modules page", LoggingLevel.DEBUG)
val pages = createAllModulesPage(generationContext)

report("Transforming pages")
report("Transforming pages", LoggingLevel.DEBUG)
val transformedPages = transformAllModulesPage(pages)

report("Rendering")
report("Rendering", LoggingLevel.DEBUG)
render(transformedPages)

report("Processing multimodule")
report("Processing multimodule", LoggingLevel.DEBUG)
processMultiModule(transformedPages)

report("Finish submodule processing")
report("Finish submodule processing", LoggingLevel.DEBUG)
finishProcessingSubmodules()

report("Running post-actions")
report("Running post-actions", LoggingLevel.DEBUG)
runPostActions()
}

@@ -18,41 +18,42 @@ import org.jetbrains.dokka.plugability.DokkaContext
import org.jetbrains.dokka.plugability.plugin
import org.jetbrains.dokka.plugability.query
import org.jetbrains.dokka.transformers.sources.AsyncSourceToDocumentableTranslator
import org.jetbrains.dokka.utilities.LoggingLevel
import org.jetbrains.dokka.utilities.parallelMap
import org.jetbrains.dokka.utilities.report

public class SingleModuleGeneration(private val context: DokkaContext) : Generation {

override fun Timer.generate() {
report("Validity check")
report("Validity check", LoggingLevel.DEBUG)
validityCheck(context)

// Step 1: translate sources into documentables & transform documentables (change internally)
report("Creating documentation models")
report("Creating documentation models", LoggingLevel.DEBUG)
val modulesFromPlatforms = createDocumentationModels()

report("Transforming documentation model before merging")
report("Transforming documentation model before merging", LoggingLevel.DEBUG)
val transformedDocumentationBeforeMerge = transformDocumentationModelBeforeMerge(modulesFromPlatforms)

report("Merging documentation models")
report("Merging documentation models", LoggingLevel.DEBUG)
val transformedDocumentationAfterMerge = mergeDocumentationModels(transformedDocumentationBeforeMerge)
?: exitGenerationGracefully("Nothing to document")

report("Transforming documentation model after merging")
report("Transforming documentation model after merging", LoggingLevel.DEBUG)
val transformedDocumentation = transformDocumentationModelAfterMerge(transformedDocumentationAfterMerge)

// Step 2: Generate pages & transform them (change internally)
report("Creating pages")
report("Creating pages", LoggingLevel.DEBUG)
val pages = createPages(transformedDocumentation)

report("Transforming pages")
report("Transforming pages", LoggingLevel.DEBUG)
val transformedPages = transformPages(pages)

// Step 3: Rendering
report("Rendering")
report("Rendering", LoggingLevel.DEBUG)
render(transformedPages)

report("Running post-actions")
report("Running post-actions", LoggingLevel.DEBUG)
runPostActions()

reportAfterRendering()
@@ -109,7 +110,7 @@ public class SingleModuleGeneration(private val context: DokkaContext) : Generat

public fun reportAfterRendering() {
context.unusedPoints.takeIf { it.isNotEmpty() }?.also {
context.logger.info("Unused extension points found: ${it.joinToString(", ")}")
context.logger.warn("Unused extension points found: ${it.joinToString(", ")}")
}

context.logger.report()
@@ -39,7 +39,7 @@ public class FileWriter(

private suspend fun checkFileCreated(path: String): Boolean = createdFilesMutex.withLock {
if (createdFiles.contains(path)) {
context.logger.error("An attempt to write ${root}/$path several times!")
context.logger.warn("An attempt to write ${root}/$path several times!")
return true
}
createdFiles.add(path)
@@ -38,7 +38,7 @@ public class PackageListService(
val contentPage = node as? ContentPage
contentPage?.dri?.forEach { dri ->
val nodeLocation = locationProvider.resolve(node, context = module, skipExtension = true)
?: run { context.logger.error("Cannot resolve path for ${node.name}!"); null }
?: run { context.logger.warn("Cannot resolve path for ${node.name}!"); null }

if (dri != DRI.topLevel && locationProvider.expectedLocationForDri(dri) != nodeLocation) {
nonStandardLocations[dri.toString()] = "$nodeLocation.${format.linkExtension}"
@@ -779,11 +779,11 @@ public open class HtmlRenderer(
block: FlowContent.() -> Unit
) {
locationProvider.resolve(to, platforms.toSet(), from)?.let { buildLink(it, block) }
?: run { context.logger.error("Cannot resolve path for `$to` from `$from`"); block() }
?: run { context.logger.warn("Cannot resolve path for `$to` from `$from`"); block() }
}

override fun buildError(node: ContentNode) {
context.logger.error("Unknown ContentNode type: $node")
context.logger.warn("Unknown ContentNode type: $node")
}

override fun FlowContent.buildLineBreak() {
@@ -40,7 +40,7 @@ public abstract class DefaultLocationProvider(
externalLocationProviderFactories
.mapNotNull { it.getExternalLocationProvider(extDocInfo) }
.firstOrNull()
?: run { dokkaContext.logger.error("No ExternalLocationProvider for '${extDocInfo.packageList.url}' found"); null }
?: run { dokkaContext.logger.warn("No ExternalLocationProvider for '${extDocInfo.packageList.url}' found"); null }
}

protected val packagesIndex: Map<String, ExternalLocationProvider?> =
@@ -120,7 +120,7 @@ internal class DefaultSamplesTransformer(val context: DokkaContext) : PageTransf
is PlatformHintedContent -> copy(inner.dfs(fqName, node))
is ContentText -> if (text == fqName) node else this
is ContentBreakLine -> this
else -> this.also { context.logger.error("Could not recognize $this ContentNode in SamplesTransformer") }
else -> this.also { context.logger.warn("Could not recognize $this ContentNode in SamplesTransformer") }
}
}

@@ -29,7 +29,7 @@ public class SameMethodNamePageMergerStrategy(

val name = pages.first().name.also {
if (pages.any { page -> page.name != it }) { // Is this even possible?
logger.error("Page names for $it do not match!")
logger.warn("Page names for $it do not match!")
}
}
val dri = members.flatMap { it.dri }.toSet()