diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index f677cde6b4aa1..cf91960278f5f 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -7,37 +7,47 @@ assignees: '' --- -**Describe the bug** +## Describe the bug (Describe the problem clearly and concisely.) -**Expected behavior** +### Expected behavior (Describe the expected behavior clearly and concisely.) -**Actual behavior** +### Actual behavior (Describe the actual behavior clearly and concisely.) -**To Reproduce** +## To Reproduce + +Link to a small reproducer (preferably a Maven project if the issue is not Gradle-specific). + +Or attach an archive containing the reproducer to the issue. + Steps to reproduce the behavior: 1. 2. 3. -**Configuration** +### Configuration ```properties # Add your application.properties here, if applicable. ``` -**Screenshots** +### Screenshots (If applicable, add screenshots to help explain your problem.) -**Environment (please complete the following information):** - - Output of `uname -a` or `ver`: - - Output of `java -version`: - - GraalVM version (if different from Java): - - Quarkus version or git rev: - - Build tool (ie. output of `mvnw --version` or `gradlew --version`): +## Environment (please complete the following information): + +### Output of `uname -a` or `ver` + +### Output of `java -version` + +### GraalVM version (if different from Java) + +### Quarkus version or git rev + +### Build tool (ie. output of `mvnw --version` or `gradlew --version`) -**Additional context** +## Additional context (Add any other context about the problem here.) diff --git a/.github/ISSUE_TEMPLATE/ecosystem-ci-participant-issue.md b/.github/ISSUE_TEMPLATE/ecosystem-ci-participant-issue.md index b0696ffae84a7..db8a31693ed67 100644 --- a/.github/ISSUE_TEMPLATE/ecosystem-ci-participant-issue.md +++ b/.github/ISSUE_TEMPLATE/ecosystem-ci-participant-issue.md @@ -7,6 +7,6 @@ assignees: '' --- -This issue will be open and closed dependent on the state of building against Quarkus master snapshot. +This issue will be open and closed dependent on the state of building against Quarkus main snapshot. If you have interest in being notified of this subscribe to the issue. diff --git a/.github/ISSUE_TEMPLATE/extension_proposal.md b/.github/ISSUE_TEMPLATE/extension_proposal.md index 2c464154e2940..4882211ae6870 100644 --- a/.github/ISSUE_TEMPLATE/extension_proposal.md +++ b/.github/ISSUE_TEMPLATE/extension_proposal.md @@ -7,16 +7,16 @@ assignees: '' --- -**Describe the extension** +## Describe the extension (Describe the extension clearly and concisely.) Interested in this extension, please +1 via the emoji/reaction feature of GitHub (top right). -**Configuration suggestion** +## Configuration suggestion ```properties # Add your application.properties here, if applicable. ``` -**Additional context** +## Additional context (Add any other context about the proposal here.) diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 37573d1a87ab4..702491740993c 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -7,8 +7,10 @@ assignees: '' --- -**Description** +## Description + (Describe the feature here.) 
-**Implementation ideas** -(If you have any implementation ideas, they can go here, however please note that all design change proposals should be posted to [the Quarkus developer mailing list](mailto:quarkus-dev@googlegroups.com) (or [the corresponding Google Group](https://groups.google.com/group/quarkus-dev); see [the decisions process document](https://github.com/quarkusio/quarkus/blob/master/DECISIONS.adoc) for more information). +## Implementation ideas + +(If you have any implementation ideas, they can go here, however please note that all design change proposals should be posted to [the Quarkus developer mailing list](mailto:quarkus-dev@googlegroups.com) (or [the corresponding Google Group](https://groups.google.com/group/quarkus-dev); see [the decisions process document](https://github.com/quarkusio/quarkus/blob/main/DECISIONS.adoc) for more information). diff --git a/.github/ISSUE_TEMPLATE/housekeeping.md b/.github/ISSUE_TEMPLATE/housekeeping.md index 88487e4539ef6..3c72c9fd972d4 100644 --- a/.github/ISSUE_TEMPLATE/housekeeping.md +++ b/.github/ISSUE_TEMPLATE/housekeeping.md @@ -7,8 +7,8 @@ assignees: '' --- -**Description** +## Description (Describe the task here.) -**Implementation ideas** +## Implementation ideas (If you have any implementation ideas, they can go here.) diff --git a/.github/NativeBuildReport.java b/.github/NativeBuildReport.java index 6f0e6743d23e1..56006a77bad87 100644 --- a/.github/NativeBuildReport.java +++ b/.github/NativeBuildReport.java @@ -3,7 +3,12 @@ //DEPS org.kohsuke:github-api:1.101 //DEPS info.picocli:picocli:4.2.0 -import org.kohsuke.github.*; +import org.kohsuke.github.GitHub; +import org.kohsuke.github.GitHubBuilder; +import org.kohsuke.github.GHRepository; +import org.kohsuke.github.GHIssue; +import org.kohsuke.github.GHIssueComment; +import org.kohsuke.github.GHIssueState; import picocli.CommandLine; import picocli.CommandLine.Command; import picocli.CommandLine.Option; @@ -87,9 +92,9 @@ public void run() { private static boolean isOpen(GHIssue issue) { return (issue.getState() == GHIssueState.OPEN); } - + public static void main(String... 
args) { int exitCode = new CommandLine(new Report()).execute(args); System.exit(exitCode); } -} +} \ No newline at end of file diff --git a/.github/autoissuelabeler.yml b/.github/autoissuelabeler.yml deleted file mode 100644 index e3b1ef08dbd53..0000000000000 --- a/.github/autoissuelabeler.yml +++ /dev/null @@ -1,152 +0,0 @@ -## docs: https://github.com/maxandersen/jbang-issuelabeler -- labels: [area/amazon-lambda] - title: "(?i).*lambda.*" - description: "(?i).*lambda.*" - notify: [patriot1burke] -- labels: [area/persistence] - title: "(?i).*DB2.*" - description: "(?si).*DB2.*" - notify: [aguibert] -- labels: [area/funqy] - title: "(?i).*funqy.*" - description: "(?i).*funqy.*" - notify: [patriot1burke] -- labels: [area/devmode] - title: "(?i).*dev mode.*" - description: "(?i).*dev mode.*" -- labels: [area/gradle] - title: "(?i).*gradle.*" - notify: [quarkusio/devtools] -- labels: [area/maven] - title: "(?i).*maven.*" - notify: [quarkusio/devtools] -- labels: [area/hibernate-reactive, area/persistence] - title: "(?i).*hibernate reactive.*" - description: "(?i).*hibernate reactive.*" - notify: [aguibert,gavinking,Sanne] -- labels: [area/hibernate-orm, area/persistence] - expression: | - title_description.matches("(?is).*hibernate.*") - and - !title_description.matches("(?is).*validator.*") - notify: [gsmet,Sanne] -- labels: [area/hibernate-search] - expression: title_description.matches("(?is).*hibernate search.*") - notify: [gsmet,yrodiere] -- labels: [area/elasticsearch] - expression: title_description.matches("(?is).*elasticsearch.*") - notify: [gsmet,yrodiere,loicmathieu] -- labels: [area/hibernate-validator] - title: "(?i).*hibernate validator.*" - description: "(?i).*hibernate validator.*" - notify: [gsmet] -- labels: [area/jaeger] - title: "(?i).*jaeger.*" - description: "(?i).*jaeger.*" - notify: [kenfinnigan,pavolloffay] -- labels: [area/kotlin] - title: "(?i).*kotlin.*" - description: "(?i).*kotlin.*" -- labels: [area/mongodb] - title: "(?i).*mongo.*" - description: "(?i).*mongo.*" - notify: [loicmathieu] -- labels: [area/openapi, area/smallrye] - title: "(?i).*openapi.*" - description: "(?i).*openapi.*" - notify: [EricWittmann,MikeEdgar,phillip-kruger] -- labels: [area/graphql, area/smallrye] - title: "(?i).*graphql.*" - description: "(?i).*graphql.*" - notify: [phillip-kruger,jmartisk] -- labels: [area/tracing, area/smallrye] - title: "(?i).*(trace|tracing|telemetry).*" - description: "(?i).*(trace|tracing|telemetry).*" - notify: [kenfinnigan,Ladicek] -- labels: [area/security, area/smallrye] - title: "(?i).*jwt.*" - description: "(?i).*jwt.*" - notify: [radcortez] -- labels: [area/metrics, area/smallrye] - title: "(?i).*metrics.*" - description: "(?i).*metrics.*" - notify: [jmartisk] -- labels: [area/health, area/smallrye] - title: "(?i).*health.*" - description: "(?i).*health.*" - notify: [jmartisk,xstefank] -- labels: [area/fault-tolerance, area/smallrye] - title: "(?i).*fault.tolerance.*" - description: "(?i).*fault.tolerance.*" - notify: [Ladicek] -- labels: [area/mutiny, area/smallrye] - title: "(?i).*mutiny.*" - description: "(?i).*mutiny.*" - notify: [cescoffier] -- labels: [area/panache] - title: "(?i).*panache.*" - description: "(?i).*panache.*" - notify: [FroMage,loicmathieu] -- labels: [area/qute] - title: "(?i).*qute.*" - description: "(?i).*qute.*" - notify: [mkouba] -- labels: [area/reactive-messaging,area/smallrye] - title: "(?i).*reactive.messaging.*" - description: "(?i).*reactive.messaging.*" - notify: [cescoffier,kenfinnigan,phillip-kruger] -- labels: 
[area/rest-client] - title: "(?i).*rest.client.*" - description: "(?i).*rest.client.*" - notify: [phillip-kruger] -- labels: [area/smallrye] - title: "(?i).*smallrye.*" - description: "(?i).*smallrye.*" - notify: [kenfinnigan,phillip-kruger,jmartisk,radcortez,Ladicek] -- labels: [area/spring] - title: "(?i).*spring.*" - description: "(?i).*spring.*" - notify: [geoand] -- labels: [env/windows] - title: "(?i).*windows.*" - description: "(?i).*windows.*" -- labels: [area/kubernetes] - title: "(?i).*kubernetes.*" - description: "(?i).*kubernetes.*" - notify: [geoand] -- labels: [area/kubernetes] - title: "(?i).*minikube.*" - description: "(?i).*minikube.*" - notify: [geoand] -- labels: [area/kubernetes] - title: "(?i).*openshift.*" - description: "(?i).*openshift.*" - notify: [geoand] -- labels: [area/container-image] - title: "(?i).*jib.*" - description: "(?i).*jib.*" - notify: [geoand] -- labels: [area/kafka-streams] - title: "(?i).*k(afka)?(\\s|-)?stream.*" - description: "(?i).*k(afka)?(\\s|-)?stream.*" - notify: [gunnarmorling] -- labels: [area/infinispan] - title: "(?i).*infinispan.*" - description: "(?i).*infinispan.*" - notify: [karesti,wburns] -- labels: [area/grpc] - title: "(?i).*grpc.*" - description: "(?i).*grpc.*" - notify: [michalszynkiewicz,cescoffier] -- labels: [area/scheduler] - title: "(?i).*schedule(r)?.*" - description: "(?i).*schedule(r)?.*" - notify: [mkouba] -- labels: [area/scheduler] - title: "(?i).*quartz.*" - description: "(?i).*quartz.*" - notify: [mkouba,machi1990] -- labels: [area/redis] - title: "(?i).*redis.*" - description: "(?i).*redis.*" - notify: [machi1990, gsmet, cescoffier] diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml deleted file mode 100644 index 6b7056427a74c..0000000000000 --- a/.github/boring-cyborg.yml +++ /dev/null @@ -1,290 +0,0 @@ -### labeler.yaml -### This is a best naive guess on auto labeling -### with this script: https://repl.it/@maxandersen/labels -### should be verified/tuned by changing the script and once -### committed updates would be done manually. - -### you can test this script by running the following in quarkus folder: -### jbang https://git.io/JvJox .github/labeler.yml - -### Non-covered dirs.... 
-### - extensions -### - extensions/agroal -### - extensions/amazon-services -### - extensions/azure-functions-http -### - extensions/caffeine -### - extensions/flyway -### - extensions/jackson -### - extensions/jdbc -### - extensions/jgit -### - extensions/jsch -### - extensions/jsonb -### - extensions/jsonp -### - extensions/keycloak-authorization -### - extensions/mailer -### - extensions/netty -### - extensions/pom.xml -### - integration-tests -### - integration-tests/.project -### - integration-tests/.settings -### - integration-tests/amazon-services -### - integration-tests/class-transformer -### - integration-tests/flyway -### - integration-tests/jackson -### - integration-tests/jgit -### - integration-tests/jsch -### - integration-tests/jsonb -### - integration-tests/keycloak -### - integration-tests/keycloak-authorization -### - integration-tests/main -### - integration-tests/pom.xml -### - integration-tests/shared-library -### - integration-tests/test-extension -### - integration-tests/virtual-http - -### Non-covered area/ -### area/bootstrap-maven -### area/build -### area/camel -### area/core -### area/dev-mode -### area/housekeeping -### area/infra -### area/new-http -### area/oidc-interoperability -### area/openapi -### area/platform -### area/reactive-messaging -### area/security-consolidation -### area/user experience -### area/amazon-lambda: -labelPRBasedOnFilePath: - area/amazon-lambda: - - extensions/amazon-lambda/**/* - - extensions/amazon-lambda-http/**/* - - integration-tests/amazon-lambda/**/* - - integration-tests/amazon-lambda-http/**/* - - integration-tests/amazon-lambda-http-it/**/* - - integration-tests/amazon-lambda-http-resteasy/**/* - area/arc: - - extensions/arc/**/* - - independent-projects/arc/**/* - area/artemis: - - extensions/artemis-core/**/* - - extensions/artemis-jms/**/* - - integration-tests/artemis-core/**/* - - integration-tests/artemis-jms/**/* - area/cache: - - extensions/cache/**/* - - integration-tests/cache/**/* - area/config: - - extensions/config-yaml/**/* - - core/deployment/src/main/java/io/quarkus/deployment/configuration/**/* - - core/runtime/src/main/java/io/quarkus/runtime/configuration/**/* - area/core: - - core/**/* - area/dependencies: - - .github/dependabot.yml - - bom/**/* - - build-parent/**/* - area/devtools: - - devtools/**/* - - independent-projects/bootstrap/**/* - - independent-projects/tools/**/* - area/documentation: - - docs/**/* - area/funqy: - - extensions/funqy/**/* - area/gradle: - - devtools/gradle/**/* - - integration-tests/gradle/**/* - area/hibernate-orm: - - extensions/hibernate-orm/**/* - - extensions/spring-data-jpa/**/* - - integration-tests/common-jpa-entities/**/* - - integration-tests/hibernate-orm-panache/**/* - - integration-tests/infinispan-cache-jpa/**/* - - integration-tests/jpa-derby/**/* - - integration-tests/jpa-h2/**/* - - integration-tests/jpa-mariadb/**/* - - integration-tests/jpa-mssql/**/* - - integration-tests/jpa-mysql/**/* - - integration-tests/jpa-postgresql/**/* - - integration-tests/jpa-without-entity/**/* - - integration-tests/jpa/**/* - - integration-tests/spring-data-jpa/**/* - - area/hibernate-search: - - extensions/hibernate-search-elasticsearch/**/* - - integration-tests/hibernate-search-elasticsearch/**/* - - extensions/elasticsearch-rest-client/**/* - - area/hibernate-validator: - - extensions/hibernate-validator/**/* - - integration-tests/hibernate-validator/**/* - area/infinispan: - - extensions/infinispan-client/**/* - - integration-tests/infinispan-cache-jpa/**/* - - 
integration-tests/infinispan-client/**/* - area/infra-automation: - - .github/**/* - area/jaeger: - - extensions/jaeger/**/* - area/jaxb: - - extensions/jaxb/**/* - area/kafka: - - extensions/kafka-client/**/* - - integration-tests/kafka/**/* - area/kafka-streams: - - extensions/kafka-streams/**/* - - integration-tests/kafka-streams/**/* - area/kotlin: - - extensions/kotlin/**/* - - integration-tests/kotlin/**/* - area/kubernetes: - - extensions/kubernetes/**/* - - extensions/kubernetes-client/**/* - - integration-tests/kubernetes/**/* - - integration-tests/kubernetes-client/**/* - area/logging: - - extensions/logging-gelf/**/* - - extensions/logging-json/**/* - - extensions/logging-sentry/**/* - - integration-tests/logging-gelf/**/* - - core/runtime/src/main/java/io/quarkus/runtime/logging/**/* - - core/deployment/src/main/java/io/quarkus/logging/**/* - area/maven: - - integration-tests/maven/**/* - - independent-projects/bootstrap/maven-plugin/**/* - - integration-tests/maven/**/* - - test-framework/maven/**/* - - devtools/maven/**/* - area/mongodb: - - extensions/mongodb-client/**/* - - integration-tests/mongodb-client/**/* - - integration-tests/mongodb-panache/**/* - - extensions/panache/mongodb-panache/**/* - area/narayana: - - extensions/narayana-jta/**/* - - extensions/narayana-stm/**/* - - integration-tests/narayana-jta/**/* - - integration-tests/narayana-stm/**/* - area/neo4j: - - extensions/neo4j/**/* - - integration-tests/neo4j/**/* - area/oidc: - - extensions/oidc/**/* - - integration-tests/oidc/**/* - - integration-tests/oidc-code-flow/**/* - area/panache: - - extensions/panache/**/* - area/persistence: - - integration-tests/jpa/**/* - - integration-tests/jpa-derby/**/* - - integration-tests/jpa-h2/**/* - - integration-tests/jpa-mariadb/**/* - - integration-tests/jpa-mssql/**/* - - integration-tests/jpa-mysql/**/* - - integration-tests/jpa-postgresql/**/* - - integration-tests/jpa-without-entity/**/* - - integration-tests/common-jpa-entities/**/* - area/platform: - - independent-projects/tools/**/* - area/qute: - - extensions/qute/**/* - area/reactive: - - extensions/reactive-mysql-client/**/* - - extensions/reactive-pg-client/**/* - - extensions/reactive-streams-operators/**/* - - integration-tests/reactive-mysql-client/**/* - - integration-tests/reactive-pg-client/**/* - area/reactive-messaging: - - extensions/smallrye-reactive-messaging-amqp - - extensions/smallrye-reactive-messaging - - extensions/smallrye-reactive-messaging-mqtt - - extensions/smallrye-reactive-messaging-kafka - area/rest-client: - - extensions/rest-client/**/* - area/resteasy: - - extensions/resteasy/**/* - - extensions/resteasy-common/**/* - - extensions/resteasy-jackson/**/* - - extensions/resteasy-jaxb/**/* - - extensions/resteasy-jsonb/**/* - - extensions/resteasy-qute/**/* - - extensions/resteasy-server-common/**/* - - integration-tests/resteasy-jackson/**/* - - integration-tests/elytron-resteasy/**/* - - integration-tests/virtual-http-resteasy/**/* - area/scala: - - extensions/scala/**/* - - integration-tests/scala/**/* - area/scheduler: - - extensions/scheduler/**/* - - extensions/quartz/**/* - - integration-tests/quartz/**/* - area/security: - - extensions/security/**/* - - extensions/elytron-security/**/* - - extensions/elytron-security-common/**/* - - extensions/elytron-security-jdbc/**/* - - extensions/elytron-security-oauth2/**/* - - extensions/elytron-security-properties-file/**/* - - extensions/vertx-keycloak/**/* - - integration-tests/elytron-security/**/* - - 
integration-tests/elytron-security-jdbc/**/* - - integration-tests/elytron-security-oauth2/**/* - - area/smallrye: - - extensions/smallrye-context-propagation/**/* - - extensions/smallrye-fault-tolerance/**/* - - extensions/smallrye-health/**/* - - extensions/smallrye-jwt/**/* - - extensions/smallrye-metrics/**/* - - extensions/smallrye-openapi/**/* - - extensions/smallrye-openapi-common/**/* - - extensions/smallrye-opentracing/**/* - - extensions/smallrye-reactive-messaging/**/* - - extensions/smallrye-reactive-messaging-amqp/**/* - - extensions/smallrye-reactive-messaging-kafka/**/* - - extensions/smallrye-reactive-messaging-mqtt/**/* - area/spring: - - extensions/spring-cloud-config-client/**/* - - extensions/spring-data-jpa/**/* - - extensions/spring-di/**/* - - extensions/spring-security/**/* - - extensions/spring-web/**/* - - integration-tests/spring-cloud-config-client/**/* - - integration-tests/spring-data-jpa/**/* - - integration-tests/spring-di/**/* - - integration-tests/spring-web/**/* - area/swagger-ui: - - extensions/swagger-ui/**/* - area/testing: - - test-framework/**/* - area/tika: - - extensions/tika/**/* - - integration-tests/tika/**/* - area/undertow: - - extensions/undertow/**/* - - integration-tests/elytron-undertow/**/* - area/undertow-websockets: - - extensions/undertow-websockets/**/* - area/vault: - - extensions/vault/**/* - - integration-tests/vault/**/* - - integration-tests/vault-agroal/**/* - - integration-tests/vault-app/**/* - area/vertx: - - extensions/vertx/**/* - - extensions/vertx-core/**/* - - extensions/vertx-graphql/**/* - - extensions/vertx-http/**/* - - extensions/vertx-keycloak/**/* - - extensions/vertx-web/**/* - - integration-tests/vertx/**/* - - integration-tests/vertx-graphql/**/* - - integration-tests/vertx-http/**/* - area/redis: - - extensions/redis-client/**/* - - integration-tests/redis-client/**/* \ No newline at end of file diff --git a/.github/ci-prerequisites.sh b/.github/ci-prerequisites.sh index 247ac0803d807..0e9237f523ef9 100755 --- a/.github/ci-prerequisites.sh +++ b/.github/ci-prerequisites.sh @@ -1,7 +1,7 @@ # Reclaim disk space, otherwise we only have 13 GB free at the start of a job -docker rmi node:10 node:12 mcr.microsoft.com/azure-pipelines/node8-typescript:latest +time docker rmi node:10 node:12 mcr.microsoft.com/azure-pipelines/node8-typescript:latest # That is 18 GB -sudo rm -rf /usr/share/dotnet +time sudo rm -rf /usr/share/dotnet # That is 1.2 GB -sudo rm -rf /usr/share/swift +time sudo rm -rf /usr/share/swift diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 5fef3552ad559..7e705b628da6a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,36 +4,31 @@ updates: directory: "/" schedule: interval: daily - time: "21:00" + time: "23:00" timezone: Europe/Paris open-pull-requests-limit: 3 labels: - area/dependencies allow: - - dependency-name: org.apache.activemq:artemis-core-client - - dependency-name: org.apache.activemq:artemis-jms-client - - dependency-name: org.apache.activemq:artemis-server - - dependency-name: org.apache.activemq:artemis-commons + - dependency-name: org.jboss:jboss-parent + - dependency-name: org.apache.activemq:* - dependency-name: org.flywaydb:flyway-core - dependency-name: org.liquibase:liquibase-core - dependency-name: org.freemarker:freemarker - - dependency-name: org.eclipse.jgit:org.eclipse.jgit - - dependency-name: org.eclipse.jgit:org.eclipse.jgit.http.server + - dependency-name: org.eclipse.jgit:* - dependency-name: io.fabric8:kubernetes-client-bom - - 
dependency-name: org.apache.httpcomponents:httpclient - - dependency-name: org.apache.httpcomponents:httpasyncclient - - dependency-name: org.apache.httpcomponents:httpcore + - dependency-name: org.apache.httpcomponents:* - dependency-name: org.quartz-scheduler:quartz - dependency-name: com.cronutils:cron-utils - dependency-name: org.eclipse:yasson - dependency-name: org.yaml:snakeyaml + - dependency-name: com.google.guava:guava + - dependency-name: com.vackosar.gitflowincrementalbuilder:gitflow-incremental-builder # Quarkus - dependency-name: io.quarkus.gizmo:gizmo - - dependency-name: io.quarkus.http:quarkus-http-vertx-backend - - dependency-name: io.quarkus.http:quarkus-http-core - - dependency-name: io.quarkus.http:quarkus-http-servlet - - dependency-name: io.quarkus.http:quarkus-http-websockets-jsr + - dependency-name: io.quarkus.http:* # Elytron + - dependency-name: org.wildfly.security:wildfly-elytron - dependency-name: org.wildfly.security:wildfly-elytron-* # JDBC Drivers - dependency-name: org.postgresql:postgresql @@ -41,16 +36,12 @@ updates: - dependency-name: mysql:mysql-connector-java - dependency-name: org.apache.derby:derbyclient # Kafka - - dependency-name: org.apache.kafka:kafka-clients - - dependency-name: org.apache.kafka:kafka-streams - - dependency-name: org.apache.kafka:kafka_2.12 + - dependency-name: org.apache.kafka:* - dependency-name: org.apache.zookeeper:zookeeper # Debezium - dependency-name: io.debezium:debezium-core # Scala - - dependency-name: org.scala-lang:scala-reflect - - dependency-name: org.scala-lang:scala-library - - dependency-name: org.scala-lang:scala-compiler + - dependency-name: org.scala-lang:* - dependency-name: net.alchim31.maven:scala-maven-plugin # SmallRye - dependency-name: io.smallrye:smallrye-jwt @@ -60,8 +51,7 @@ updates: - dependency-name: io.smallrye:smallrye-opentracing - dependency-name: io.smallrye:smallrye-fault-tolerance - dependency-name: io.smallrye:smallrye-context-propagation - - dependency-name: io.smallrye.common:smallrye-common-annotation - - dependency-name: io.smallrye.common:smallrye-common-io + - dependency-name: io.smallrye.common:smallrye-common-bom - dependency-name: io.smallrye.config:smallrye-config - dependency-name: io.smallrye.reactive:mutiny - dependency-name: io.smallrye.reactive:smallrye-reactive-messaging @@ -72,16 +62,15 @@ updates: # RX Java 2 - dependency-name: io.reactivex.rxjava2:rxjava # Test dependencies - - dependency-name: io.rest-assured:rest-assured - - dependency-name: io.rest-assured:json-schema-validator + - dependency-name: io.rest-assured:* - dependency-name: org.junit:junit-bom + - dependency-name: org.junit.jupiter:* - dependency-name: org.assertj:assertj-core - - dependency-name: org.testcontainers:testcontainers-bom - - dependency-name: org.testcontainers:testcontainers - - dependency-name: org.testcontainers:postgresql - - dependency-name: org.mockito:mockito-core - - dependency-name: org.mockito:mockito-junit-jupiter + - dependency-name: org.testcontainers:* + - dependency-name: org.mockito:* - dependency-name: org.awaitility:awaitility + - dependency-name: com.thoughtworks.xstream:xstream + - dependency-name: org.jacoco:* # Maven plugins - dependency-name: net.revelc.code.formatter:formatter-maven-plugin - dependency-name: net.revelc.code:impsort-maven-plugin @@ -90,9 +79,7 @@ updates: - dependency-name: org.jboss.narayana.jts:narayana-jts-integration - dependency-name: org.jboss.narayana.stm:stm # Agroal - - dependency-name: io.agroal:agroal-api - - dependency-name: 
io.agroal:agroal-pool - - dependency-name: io.agroal:agroal-narayana + - dependency-name: io.agroal:* #Jandex - dependency-name: org.jboss:jandex - dependency-name: org.jboss.jandex:jandex-maven-plugin @@ -102,17 +89,25 @@ updates: - dependency-name: com.github.tomakehurst:wiremock-jre8 - dependency-name: uk.co.automatictester:wiremock-maven-plugin #Picocli - - dependency-name: info.picocli:picocli - - dependency-name: info.picocli:picocli-codegen + - dependency-name: info.picocli:* # Caffeine - dependency-name: com.github.ben-manes.caffeine:caffeine # Jackson - dependency-name: com.fasterxml.jackson:jackson-bom # AWS - - dependency-name: software.amazon.awssdk:bom - - dependency-name: software.amazon.awssdk:apache-client - - dependency-name: com.amazon.alexa:ask-sdk - - dependency-name: com.amazon.alexa:ask-sdk-apache-client + - dependency-name: software.amazon.awssdk:* + - dependency-name: com.amazon.alexa:* + - dependency-name: com.amazonaws:* + # Apache Commons + - dependency-name: commons-io:commons-io + - dependency-name: org.apache.commons:commons-lang3 + # Micrometer + - dependency-name: io.micrometer:micrometer-bom + # BouncyCastle + - dependency-name: org.bouncycastle:* + # Logstash + - dependency-name: biz.paluch.logging:logstash-gelf + - dependency-name: io.sentry:* rebase-strategy: disabled - package-ecosystem: gradle directory: "/integration-tests/gradle" diff --git a/.github/docker-prune.bat b/.github/docker-prune.bat new file mode 100644 index 0000000000000..0b37a66977cfc --- /dev/null +++ b/.github/docker-prune.bat @@ -0,0 +1,9 @@ +@echo off + +rem See remarks in docker-prune.sh +if "%GITHUB_ACTIONS%"== "true" ( + docker container prune -f || exit /b + docker image prune -f || exit /b + docker network prune -f || exit /b + docker volume prune -f || exit /b +) diff --git a/.github/mvn-settings.xml b/.github/mvn-settings.xml index f416bf826c6d0..80f8b1887affd 100644 --- a/.github/mvn-settings.xml +++ b/.github/mvn-settings.xml @@ -1,39 +1,46 @@ - + + + + + sonatype-nexus-snapshots + ${env.SERVER_USERNAME} + ${env.SERVER_PASSWORD} + + + - google-mirror-jboss-proxy + google-mirror google-maven-central GCS Maven Central mirror EU - https://maven-central-eu.storage-download.googleapis.com/repos/central/data/ - - - jboss-maven-central-proxy - JBoss Maven Central proxy - https://repository.jboss.org/nexus/content/repositories/central/ - - - gradle-official-repository - Gradle Official Repository - https://repo.gradle.org/gradle/libs-releases-local/ + https://maven-central.storage-download.googleapis.com/maven2/ + + true + + + false + google-maven-central GCS Maven Central mirror EU - https://maven-central-eu.storage-download.googleapis.com/repos/central/data/ - - - jboss-maven-central-proxy - JBoss Maven Central proxy - https://repository.jboss.org/nexus/content/repositories/central/ + https://maven-central.storage-download.googleapis.com/maven2/ + + true + + + false + - google-mirror-jboss-proxy + google-mirror diff --git a/.github/native-tests.json b/.github/native-tests.json new file mode 100644 index 0000000000000..542c5acc3e5a3 --- /dev/null +++ b/.github/native-tests.json @@ -0,0 +1,126 @@ +{ + "category": [ + "Main", + "Data1", + "Data2", + "Data3", + "Data4", + "Data5", + "Data6", + "Security1", + "Security2", + "Security3", + "Amazon", + "Messaging", + "Cache", + "HTTP", + "Misc1", + "Misc2", + "Misc3", + "Misc4", + "Spring", + "gRPC" + ], + "include": [ + { + "category": "Main", + "timeout": 40, + "test-modules": "main" + }, + { + "category": "Data1", + "timeout": 65, + 
"test-modules": "jpa-h2 jpa-mariadb jpa-mssql jpa-derby jpa-without-entity hibernate-tenancy" + }, + { + "category": "Data2", + "timeout": 65, + "test-modules": "jpa jpa-mysql jpa-db2 reactive-mysql-client reactive-db2-client hibernate-reactive-db2 hibernate-reactive-mysql" + }, + { + "category": "Data3", + "timeout": 70, + "test-modules": "flyway hibernate-orm-panache hibernate-orm-panache-kotlin hibernate-orm-envers liquibase" + }, + { + "category": "Data4", + "timeout": 55, + "test-modules": "mongodb-client mongodb-panache mongodb-rest-data-panache mongodb-panache-kotlin redis-client neo4j hibernate-orm-rest-data-panache" + }, + { + "category": "Data5", + "timeout": 65, + "test-modules": "jpa-postgresql narayana-stm narayana-jta reactive-pg-client hibernate-reactive-postgresql" + }, + { + "category": "Data6", + "timeout": 50, + "test-modules": "elasticsearch-rest-client elasticsearch-rest-high-level-client hibernate-search-orm-elasticsearch hibernate-reactive-panache" + }, + { + "category": "Amazon", + "timeout": 45, + "test-modules": "amazon-services amazon-lambda amazon-lambda-http" + }, + { + "category": "Messaging", + "timeout": 120, + "test-modules": "artemis-core artemis-jms kafka kafka-avro kafka-snappy kafka-streams reactive-messaging-amqp reactive-messaging-kafka reactive-messaging-http" + }, + { + "category": "Security1", + "timeout": 50, + "test-modules": "elytron-security-oauth2 elytron-security elytron-security-jdbc elytron-undertow elytron-security-ldap bouncycastle bouncycastle-jsse bouncycastle-fips" + }, + { + "category": "Security2", + "timeout": 70, + "test-modules": "oidc oidc-code-flow oidc-tenancy keycloak-authorization oidc-client oidc-token-propagation smallrye-jwt-token-propagation oidc-wiremock oidc-client-wiremock" + }, + { + "category": "Security3", + "timeout": 50, + "test-modules": "vault vault-app vault-agroal" + }, + { + "category": "Cache", + "timeout": 55, + "test-modules": "infinispan-cache-jpa infinispan-client cache" + }, + { + "category": "HTTP", + "timeout": 65, + "test-modules": "elytron-resteasy resteasy-jackson resteasy-mutiny vertx vertx-http vertx-web vertx-graphql virtual-http rest-client" + }, + { + "category": "Misc1", + "timeout": 65, + "test-modules": "maven jackson jsonb jsch jgit quartz qute consul-config logging-min-level-unset logging-min-level-set" + }, + { + "category": "Misc2", + "timeout": 65, + "test-modules": "tika hibernate-validator test-extension logging-gelf bootstrap-config mailer native-config-profile" + }, + { + "category": "Misc3", + "timeout": 65, + "test-modules": "kubernetes-client openshift-client" + }, + { + "category": "Misc4", + "timeout": 65, + "test-modules": "smallrye-config smallrye-graphql picocli-native gradle micrometer-mp-metrics micrometer-prometheus smallrye-metrics logging-json jaxp" + }, + { + "category": "Spring", + "timeout": 55, + "test-modules": "spring-di spring-web spring-data-jpa spring-boot-properties spring-cloud-config-client spring-data-rest" + }, + { + "category": "gRPC", + "timeout": 65, + "test-modules": "grpc-health grpc-interceptors grpc-mutual-auth grpc-plain-text-gzip grpc-plain-text-mutiny grpc-proto-v2 grpc-streaming grpc-tls" + } + ] +} diff --git a/.github/quarkus-bot.yml b/.github/quarkus-bot.yml new file mode 100644 index 0000000000000..2eda1651cb9be --- /dev/null +++ b/.github/quarkus-bot.yml @@ -0,0 +1,439 @@ +--- +# The format of this file is documented here: +# https://github.com/quarkusio/quarkus-bot#triage-issues +triage: + rules: + - labels: [area/amazon-lambda] + title: 
"lambda" + notify: [patriot1burke, matejvasek] + directories: + - extensions/amazon-lambda + - integration-tests/amazon-lambda + - labels: [area/persistence] + title: "db2" + notify: [mswatosh] + directories: + - extensions/reactive-db2-client/ + - extensions/jdbc/jdbc-db2/ + - labels: [area/funqy] + titleBody: "funqy" + notify: [patriot1burke, matejvasek] + directories: + - extensions/funqy/ + - labels: [area/devmode] + title: "dev mode" + - labels: [area/gradle] + title: "gradle" + notify: [quarkusio/devtools, glefloch] + directories: + - devtools/gradle/ + - integration-tests/gradle/ + - labels: [area/maven] + title: "maven" + notify: [quarkusio/devtools] + directories: + - devtools/maven/ + - integration-tests/maven/ + - independent-projects/bootstrap/maven-plugin/ + - integration-tests/maven/ + - test-framework/maven/ + - labels: [area/jbang] + titleBody: "jbang" + notify: [quarkusio/devtools, maxandersen] + directories: + - "**/*JBang*" + - "**/*jbang*" + - labels: [area/codestarts] + title: "codestart" + notify: [quarkusio/devtools, ia3andy] + directories: + - independent-projects/tools/codestarts/ + - devtools/platform-descriptor-json/src/main/resources/bundled-codestarts/ + - devtools/platform-descriptor-json/src/main/resources/codestarts/ + - devtools/platform-descriptor-json/src/main/resources/templates/ + - labels: [area/hibernate-reactive, area/persistence] + title: "hibernate.reactive" + notify: [DavideD, gavinking, Sanne] + directories: + - extensions/hibernate-reactive + - labels: [area/hibernate-orm, area/persistence] + expression: | + matches("hibernate", title) + && !matches("hibernate.validator", title) + && !matches("hibernate.search", title) + && !matches("hibernate.reactive", title) + notify: [gsmet, Sanne, yrodiere] + directories: + - extensions/hibernate-orm/ + - integration-tests/common-jpa-entities/ + - integration-tests/jpa-derby/ + - integration-tests/jpa-h2/ + - integration-tests/jpa-mariadb/ + - integration-tests/jpa-mssql/ + - integration-tests/jpa-mysql/ + - integration-tests/jpa-postgresql/ + - integration-tests/jpa-without-entity/ + - integration-tests/jpa/ + - integration-tests/infinispan-cache-jpa/ + - labels: [area/hibernate-search] + title: "hibernate.search" + notify: [gsmet, yrodiere] + directories: + - extensions/hibernate-search-orm-elasticsearch/ + - integration-tests/hibernate-search-orm-elasticsearch/ + - extensions/elasticsearch-rest-client/ + - extensions/elasticsearch-rest-client-common/ + - labels: [area/elasticsearch] + title: "elasticsearch" + notify: [gsmet, yrodiere, loicmathieu] + directories: + - extensions/elasticsearch + - integration-tests/elasticsearch + - labels: [area/hibernate-validator] + title: "hibernate.validator" + notify: [gsmet, yrodiere] + directories: + - extensions/hibernate-validator/ + - integration-tests/hibernate-validator/ + - labels: [area/jaeger] + title: "jaeger" + notify: [kenfinnigan] + directories: + - extensions/jaeger/ + - labels: [area/kotlin] + titleBody: "kotlin" + notify: [evanchooly] + directories: + - extensions/kotlin/ + - integration-tests/kotlin/ + - labels: [area/mongodb] + title: "mongo" + notify: [loicmathieu, evanchooly] + directories: + - extensions/mongodb-client/ + - integration-tests/mongodb-client/ + - integration-tests/mongodb-panache/ + - extensions/panache/mongodb-panache/ + - labels: [area/openapi, area/smallrye] + title: "openapi" + notify: [EricWittmann, MikeEdgar, phillip-kruger] + directories: + - extensions/smallrye-openapi + - labels: [area/graphql, area/smallrye] + title: 
"graphql" + notify: [phillip-kruger, jmartisk] + directories: + - extensions/smallrye-graphql/ + - extensions/vertx-graphql/ + - integration-tests/vertx-graphql/ + - labels: [area/tracing, area/smallrye] + title: "(trace|tracing|telemetry)" + notify: [kenfinnigan, Ladicek] + directories: + - extensions/smallrye-opentracing/ + - labels: [area/security, area/smallrye] + title: "jwt" + notify: [sberyozkin] + - labels: [area/security] + title: "security" + notify: [sberyozkin] + - labels: [area/metrics, area/smallrye] + title: "metrics" + notify: [jmartisk] + directories: + - extensions/smallrye-metrics + - labels: [area/metrics] + title: "micrometer" + notify: [ebullient] + directories: + - extensions/micrometer + - labels: [area/health, area/smallrye] + title: "health" + notify: [jmartisk, xstefank] + directories: + - extensions/smallrye-health/ + - labels: [area/fault-tolerance, area/smallrye] + title: "fault.tolerance" + notify: [Ladicek] + directories: + - extensions/smallrye-fault-tolerance/ + - labels: [area/mutiny, area/smallrye] + title: "mutiny" + notify: [cescoffier, jponge] + directories: + - extensions/mutiny/ + - extensions/rest-client-mutiny/ + - extensions/resteasy-mutiny-common/ + - extensions/resteasy-mutiny/ + - labels: [area/panache] + title: "panache" + notify: [FroMage, loicmathieu] + directories: + - extensions/panache/ + - integration-tests/hibernate-orm-panache/ + - labels: [area/qute] + title: "qute" + notify: [mkouba] + directories: + - extensions/qute/ + - extensions/resteasy-qute/ + - labels: [area/reactive-messaging, area/smallrye] + title: "reactive.messaging" + notify: [cescoffier, kenfinnigan, phillip-kruger] + directories: + - extensions/smallrye-reactive-messaging + - labels: [area/rest-client] + title: "rest.client" + notify: [phillip-kruger] + directories: + - extensions/rest-client/ + - labels: [area/smallrye] + title: "smallrye" + notify: [phillip-kruger, jmartisk, radcortez, Ladicek] + directories: + - extensions/smallrye- + - labels: [area/spring] + title: "spring" + notify: [geoand] + directories: + - extensions/spring- + - integration-tests/spring- + - labels: [env/windows] + titleBody: "windows" + - labels: [area/kubernetes] + titleBody: "kubernetes" + notify: [geoand] + directories: + - extensions/kubernetes/ + - extensions/kubernetes-client/ + - integration-tests/kubernetes/ + - integration-tests/kubernetes-client/ + - labels: [area/kubernetes] + titleBody: "minikube" + notify: [geoand] + - labels: [area/kubernetes] + titleBody: "openshift" + notify: [geoand] + - labels: [area/kubernetes] + titleBody: "knative" + notify: [geoand] + - labels: [area/container-image] + titleBody: "jib" + notify: [geoand] + - labels: [area/kafka] + notify: [cescoffier] + title: "kafka" + directories: + - extensions/kafka-client/ + - integration-tests/kafka/ + - labels: [area/kafka-streams] + title: "k(afka)?(\\s|-)?stream" + notify: [gunnarmorling, rquinio] + directories: + - extensions/kafka-streams + - integration-tests/kafka-streams/ + - labels: [area/infinispan] + title: "infinispan" + notify: [karesti, wburns] + directories: + - extensions/infinispan-client/ + - integration-tests/infinispan-cache-jpa/ + - integration-tests/infinispan-client/ + - labels: [area/grpc] + title: "grpc" + notify: [michalszynkiewicz, cescoffier] + directories: + - extensions/grpc-common + - extensions/grpc + - labels: [area/scheduler] + title: "schedule(r)?" 
+ notify: [mkouba] + directories: + - extensions/scheduler/ + - labels: [area/scheduler] + title: "quartz" + notify: [mkouba, machi1990] + directories: + - extensions/quartz/ + - integration-tests/quartz/ + - labels: [area/redis] + title: "redis" + notify: [machi1990, gsmet, cescoffier] + directories: + - extensions/redis-client/ + - integration-tests/redis-client/ + - labels: [area/arc] + title: "\\b(arc|cdi|injection)\\b" + notify: [mkouba, manovotn] + directories: + - extensions/arc/ + - independent-projects/arc/ + - labels: [area/google-cloud-functions] + title: "google.cloud" + notify: [loicmathieu] + directories: + - extensions/google-cloud-functions + - integration-tests/google-cloud-functions + - labels: [area/mandrel] + titleBody: "mandrel" + notify: [galderz, zakkak] + - labels: [area/artemis] + directories: + - extensions/artemis-core/ + - extensions/artemis-jms/ + - integration-tests/artemis-core/ + - integration-tests/artemis-jms/ + - labels: [area/cache] + title: "cache" + notify: [gwenneg] + directories: + - extensions/cache/ + - integration-tests/cache/ + - labels: [area/cli] + directories: + - devtools/cli/ + - labels: [area/config] + directories: + - extensions/config-yaml/ + - core/deployment/src/main/java/io/quarkus/deployment/configuration/ + - core/runtime/src/main/java/io/quarkus/runtime/configuration/ + - labels: [area/core] + directories: + - core/ + - labels: [area/dependencies] + directories: + - .github/dependabot.yml + - bom/ + - build-parent/ + - labels: [area/devtools] + directories: + - devtools/ + - independent-projects/bootstrap/ + - independent-projects/tools/ + - labels: [area/documentation] + directories: + - docs/ + - labels: [area/infra-automation] + directories: + - .github/ + - labels: [area/jaxb] + title: "jaxb" + notify: [gsmet] + directories: + - extensions/jaxb/ + - labels: [area/logging] + directories: + - extensions/logging-gelf/ + - extensions/logging-json/ + - extensions/logging-sentry/ + - integration-tests/logging-gelf/ + - core/runtime/src/main/java/io/quarkus/runtime/logging/ + - core/deployment/src/main/java/io/quarkus/logging/ + - labels: [area/narayana] + directories: + - extensions/narayana-jta/ + - extensions/narayana-stm/ + - integration-tests/narayana-jta/ + - integration-tests/narayana-stm/ + - labels: [area/neo4j] + title: "neo4j" + notify: [michael-simons] + directories: + - extensions/neo4j/ + - integration-tests/neo4j/ + - labels: [area/oidc] + title: "oidc" + notify: [sberyozkin, pedroigor] + directories: + - extensions/oidc/ + - integration-tests/oidc/ + - integration-tests/oidc-code-flow/ + - labels: [area/platform] + directories: + - independent-projects/tools/ + - labels: [area/reactive] + directories: + - extensions/reactive-mysql-client/ + - extensions/reactive-pg-client/ + - extensions/reactive-streams-operators/ + - integration-tests/reactive-mysql-client/ + - integration-tests/reactive-pg-client/ + - labels: [area/resteasy] + directories: + - extensions/resteasy/ + - extensions/resteasy-common/ + - extensions/resteasy-jackson/ + - extensions/resteasy-jsonb/ + - extensions/resteasy-jaxb/ + - extensions/resteasy-multipart/ + - extensions/resteasy-server-common/ + - integration-tests/resteasy-jackson/ + - integration-tests/elytron-resteasy/ + - integration-tests/virtual-http-resteasy/ + - labels: [area/resteasy-reactive] + title: resteasy.reactive + notify: [geoand, FroMage, stuartwdouglas] + directories: + - extensions/resteasy-reactive/ + - labels: [area/scala] + directories: + - extensions/scala/ + - 
integration-tests/scala/ + - labels: [area/vault] + title: vault + notify: [vsevel] + directories: + - extensions/vault/ + - integration-tests/vault + - labels: [area/vertx] + directories: + - extensions/vertx + - integration-tests/vertx + - labels: [area/tika] + title: tika + notify: [sberyozkin] + directories: + - extensions/tika/ + - integration-tests/tika/ + - labels: [area/testing] + directories: + - test-framework/ + - labels: [area/undertow] + directories: + - extensions/undertow/ + - integration-tests/elytron-undertow/ + - labels: [area/websockets] + directories: + - extensions/websockets/ + - labels: [area/swagger-ui] + title: "swagger" + notify: [phillip-kruger] + directories: + - extensions/swagger-ui/ + - labels: [area/security] + directories: + - extensions/security/ + - extensions/elytron + - extensions/vertx-keycloak/ + - integration-tests/elytron + - labels: [area/flyway] + title: "flyway" + notify: [cristhiank, geoand, gastaldi, gsmet] + directories: + - extensions/flyway/ + - integration-tests/flyway/ + - labels: [area/liquibase] + title: "liquibase" + notify: [andrejpetras, geoand, gsmet] + directories: + - extensions/liquibase/ + - integration-tests/liquibase/ + - labels: [area/kogito] + title: "kogito" + notify: [evacchi, mariofusco] + - labels: [area/optaplanner] + title: "optaplanner" + notify: [ge0ffrey, rsynek, Christopher-Chianelli] + - labels: [kind/extension-proposal] + title: quarkiverse + notify: [gastaldi, gsmet, aloubyansky] diff --git a/.github/quarkusbuilditemdoc.java b/.github/quarkusbuilditemdoc.java new file mode 100755 index 0000000000000..7b947446ad03e --- /dev/null +++ b/.github/quarkusbuilditemdoc.java @@ -0,0 +1,228 @@ +//usr/bin/env jbang "$0" "$@" ; exit $? +//DEPS info.picocli:picocli:4.5.0 +//DEPS org.jboss.forge.roaster:roaster-jdt:2.22.2.Final +//DEPS org.eclipse.collections:eclipse-collections:10.4.0 +//DEPS org.yaml:snakeyaml:1.27 +//DEPS io.fabric8:maven-model-helper:16 + +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintStream; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.Callable; + +import io.fabric8.maven.Maven; +import org.eclipse.collections.api.multimap.Multimap; +import org.eclipse.collections.api.multimap.MutableMultimap; +import org.eclipse.collections.api.tuple.Pair; +import org.eclipse.collections.impl.factory.Multimaps; +import org.eclipse.collections.impl.tuple.Tuples; +import org.jboss.forge.roaster.Roaster; +import org.jboss.forge.roaster.model.JavaDocCapable; +import org.jboss.forge.roaster.model.source.FieldSource; +import org.jboss.forge.roaster.model.source.JavaClassSource; +import org.yaml.snakeyaml.Yaml; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +@Command(name = "builditemdoc", mixinStandardHelpOptions = true, version = "builditemdoc 0.1", + description = "builditemdoc made with jbang") +class quarkusbuilditemdoc implements Callable { + + @CommandLine.Option(names = {"--outputFile"}) + public Path outputFile; + + @CommandLine.Option(names = {"--dirs"}, required = true, arity = "1..*") + public List paths; + + private PrintStream out = System.out; + + public static void main(String... 
args) { + int exitCode = new CommandLine(new quarkusbuilditemdoc()).execute(args); + System.exit(exitCode); + } + + // jbang .github/quarkusbuilditemdoc.java --outputFile target/asciidoc/generated/config/quarkus-all-build-items.adoc --dirs core/deployment core/test-extension extensions + @Override + public Integer call() throws Exception { + if (outputFile != null) { + Files.createDirectories(outputFile.getParent()); + out = new PrintStream(Files.newOutputStream(outputFile)); + } + final Multimap> multimap = collect(); + Map names = extractNames(Paths.get("."), multimap.keySet()); + // Print Core first + { + printTableHeader(names.remove("Core")); + for (Pair source : multimap.get("Core")) { + printTableRow(source); + } + printTableFooter(); + } + names.forEach((key, name) -> { + printTableHeader(name); + for (Pair source : multimap.get(key)) { + printTableRow(source); + } + printTableFooter(); + }); + return 0; + } + + private String getJavaDoc(JavaDocCapable source) { + if (!source.hasJavaDoc()) { + return "No Javadoc found"; + } + return source.getJavaDoc().getText(); + } + + private Multimap> collect() throws IOException { + MutableMultimap> multimap = Multimaps.mutable.sortedSet + .with(Comparator.comparing(o -> o.getTwo().getName())); + String line; + for (Path path : paths) { + Files.walkFileTree(path, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (file.toString().endsWith("BuildItem.java")) { + process(multimap, file); + } + return FileVisitResult.CONTINUE; + } + }); + } + return multimap; + } + + private void process(MutableMultimap> multimap, Path path) throws IOException { + JavaClassSource source = Roaster.parse(JavaClassSource.class, path.toFile()); + // Ignore deprecated annotations and non-public classes + if (!source.hasAnnotation(Deprecated.class) && source.isPublic()) { + String name; + Path pom = findPom(path); + if (pom != null) { + name = Maven.readModel(pom).getName(); + } else { + String pathString = path.toString(); + int spiIdx = pathString.indexOf("/spi/src"); + int runtimeIdx = pathString.indexOf("/runtime/src"); + int deploymentIdx = pathString.indexOf("/deployment/src"); + int idx = Math.max(Math.max(spiIdx, runtimeIdx), deploymentIdx); + int extensionsIdx = pathString.indexOf("extensions/"); + int startIdx = 0; + if (extensionsIdx != -1) { + startIdx = extensionsIdx + 11; + } + if (idx == -1) { + name = pathString.substring(startIdx, pathString.indexOf("/", startIdx + 1)); + } else { + name = pathString.substring(startIdx, idx); + } + } + // sanitize name + name = name.replace("Quarkus - ", "") + .replace(" - Deployment", ""); + Pair pair = Tuples.pair(path, source); + multimap.put(name, pair); + } + } + + private Path findPom(Path path) { + Path pom = null; + Path parent = path; + while (pom == null && (parent = parent.getParent()) != null) { + Path resolve = parent.resolve("pom.xml"); + if (Files.exists(resolve)) { + pom = resolve; + } + } + return pom; + } + + + private Map extractNames(Path root, Iterable extensionDirs) throws IOException { + Map names = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + Yaml yaml = new Yaml(); + for (String extension : extensionDirs) { + Path yamlPath = root.resolve("extensions/" + extension + "/runtime/src/main/resources/META-INF/quarkus-extension.yaml"); + if (Files.exists(yamlPath)) { + try (InputStream is = Files.newInputStream(yamlPath)) { + Map map = yaml.load(is); + names.put(extension, map.get("name")); + } + } else { + 
names.put(extension, extension); + } + } + return names; + } + + private void printTableHeader(String title) { + out.println("== " + title); + out.println("[%header,cols=2*]"); + out.println("|==="); + out.println("|Class Name |Attributes "); + } + + private void printTableRow(Pair pair) { + //TODO: Use tagged version? + Path root = Paths.get(".").toAbsolutePath().normalize(); + String link = "https://github.com/quarkusio/quarkus/blob/main/" + root.relativize(pair.getOne().normalize()); + JavaClassSource source = pair.getTwo(); + String className = source.getQualifiedName(); + String attributes = buildAttributes(source); + String description = getJavaDoc(source); + + out.println("a| " + link + "[`" + className + "`, window=\"_blank\"] :: +++" + + javadocToHTML(description) + "+++"); + out.println("a| " + attributes); + } + + private String buildAttributes(JavaClassSource source) { + StringBuilder sb = new StringBuilder(); + for (FieldSource field : source.getFields()) { + if (field.isStatic()) { + continue; + } + sb.append("`" + field.getType().getName() + " " + field.getName() + "` :: "); + sb.append("+++" + javadocToHTML(getJavaDoc(field)) + "+++"); + sb.append("\n"); + } + return sb.length() == 0 ? "None" : sb.toString(); + } + + private void printTableFooter() { + out.println("|==="); + } + + private String javadocToHTML(String content) { + return content + .replaceAll("\\{?@see ", "
")
+                .replaceAll("\\{?@code ", "
")
+                .replaceAll("\\{?@link ", "
")
+                .replaceAll(" ?}", "
"); + } + + private String javadocToAsciidoc(String content) { + return content + .replaceAll("

", "\n") + .replaceAll("

", "\n") + .replaceAll("\\{?@see ", "```") + .replaceAll("\\{?@code ", "```") + .replaceAll("\\{?@link ", "```") + .replaceAll("
", "```\n")
+                .replaceAll("
", "\n```") + .replaceAll(" ?}", "```"); + } + + +} diff --git a/.github/workflows/autolabel.yml b/.github/workflows/autolabel.yml deleted file mode 100644 index 85c2ed5e518f6..0000000000000 --- a/.github/workflows/autolabel.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: "Auto label new issues" -on: - issues: - types: [opened] - -jobs: - autolabel: - runs-on: ubuntu-latest - name: auto label - steps: -# - name: Debug Action -# uses: hmarr/debug-action@v1.0.0 - - name: issuelabeler - continue-on-error: true - uses: docker://maxandersen/jbang-issuelabeler - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} - CONFIG: https://raw.githubusercontent.com/quarkusio/quarkus/master/.github/autoissuelabeler.yml - diff --git a/.github/workflows/ci-actions-incremental.yml b/.github/workflows/ci-actions-incremental.yml new file mode 100644 index 0000000000000..0387d4ea9ad8b --- /dev/null +++ b/.github/workflows/ci-actions-incremental.yml @@ -0,0 +1,717 @@ +name: Quarkus CI + +on: + push: + branches-ignore: + - 'dependabot/**' + # paths-ignore in ci-fork-mvn-cache.yml should match + paths-ignore: + - '.gitignore' + - '.dockerignore' + - '*.md' + - '*.adoc' + - '*.txt' + - 'docs/src/main/asciidoc/**' + - '.github/ISSUE_TEMPLATE/**' + - '.github/*.yml' + - '.github/*.java' + - '.github/*.conf' + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + paths-ignore: + - '.gitignore' + - '.dockerignore' + - '*.md' + - '*.adoc' + - '*.txt' + - 'docs/src/main/asciidoc/**' + - '.github/ISSUE_TEMPLATE/**' + - '.github/*.yml' + - '.github/*.java' + - '.github/*.conf' + +env: + # Workaround testsuite locale issue + LANG: en_US.UTF-8 + NATIVE_TEST_MAVEN_OPTS: "-B --settings .github/mvn-settings.xml --fail-at-end -Dquarkus.native.container-build=true -Dtest-containers -Dstart-containers -Dnative-image.xmx=5g -Dnative -Dnative.surefire.skip -Dformat.skip -Dno-descriptor-tests install -DskipDocs" + WINDOWS_NATIVE_TEST_MAVEN_OPTS: "-B --settings .github/mvn-settings.xml --fail-at-end -Dtest-containers -Dstart-containers -Dnative-image.xmx=5g -Dnative -Dnative.surefire.skip -Dformat.skip -Dno-descriptor-tests install -DskipDocs" + JVM_TEST_MAVEN_OPTS: "-e -B --settings .github/mvn-settings.xml -Dtest-containers -Dstart-containers -Dformat.skip -DskipDocs" + DB_USER: hibernate_orm_test + DB_PASSWORD: hibernate_orm_test + DB_NAME: hibernate_orm_test +jobs: + ci-sanity-check: + name: "CI Sanity Check" + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + steps: + - name: Build + run: sleep 30 + build-jdk11: + name: "Initial JDK 11 Build" + runs-on: ubuntu-latest + # Skip main in forks + # Skip draft PRs and those with WIP in the subject, rerun as soon as its removed + if: "(github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')) && ( \ + github.event_name != 'pull_request' || ( \ + github.event.pull_request.draft == false && \ + github.event.pull_request.state != 'closed' && \ + contains(github.event.pull_request.title, 'wip ') == false && \ + contains(github.event.pull_request.title, '[wip]') == false && \ + ( + github.event.action != 'edited' || \ + contains(github.event.changes.title.from, 'wip ') || \ + contains(github.event.changes.title.from, '[wip]') \ + ) \ + ) \ + )" + steps: + - uses: actions/checkout@v2 + - uses: n1hility/cancel-previous-runs@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + - name: Set up JDK 11 + # Uses sha for added 
security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Get Date + id: get-date + run: | + echo "::set-output name=date::$(/bin/date -u "+%Y-%m")" + shell: bash + - name: Cache Maven Repository + id: cache-maven + uses: actions/cache@v2 + with: + path: ~/.m2/repository + # refresh cache every month to avoid unlimited growth + key: q2maven-${{ steps.get-date.outputs.date }} + - name: Build + # note: '-Pincremental -Dgib.disable' shall only trigger the download of gitflow-incremental-builder, not activate it + # since -Dincremental deactivates dependencies in bom-descriptor-json (which we do need here), -P is used instead + run: | + mvn -T1C -e -B -DskipTests -DskipITs -Dinvoker.skip -Dno-format -Dtcks -Pincremental -Dgib.disable --settings .github/mvn-settings.xml clean install + - name: Verify extension dependencies + shell: bash + run: | + ./update-extension-dependencies.sh -B --settings .github/mvn-settings.xml + if [ `git status -s -u no '*pom.xml' | wc -l` -ne 0 ] + then + echo -e '\033[0;31mError:\033[0m Dependencies to extension artifacts are outdated!' 1>&2 + echo -e '\033[0;31mError:\033[0m Run ./update-extension-dependencies.sh and add the modified pom.xml files to your commit.' 1>&2 + exit 1 + fi + - name: Tar Maven Repo + shell: bash + run: tar -I 'pigz -9' -cf maven-repo.tgz -C ~ .m2/repository + - name: Persist Maven Repo + uses: actions/upload-artifact@v1 + with: + name: maven-repo + path: maven-repo.tgz + - name: Delete Local Artifacts From Cache + shell: bash + run: rm -r ~/.m2/repository/io/quarkus + - name: Get GIB arguments + id: get-gib-args + env: + PULL_REQUEST_BASE: ${{ github.event.pull_request.base.ref }} + run: | + # See also: https://github.com/gitflow-incremental-builder/gitflow-incremental-builder#configuration (GIB) + # Common GIB_ARGS for all CI cases (hint: see also root pom.xml): + # - disableSelectedProjectsHandling: required to detect changes in jobs that use -pl + # - untracked: to ignore files created by jobs (and uncommitted to be consistent) + GIB_ARGS="-Dincremental -Dgib.disableSelectedProjectsHandling -Dgib.untracked=false -Dgib.uncommitted=false" + if [ -n "$PULL_REQUEST_BASE" ] + then + # The PR defines a clear merge target so just use that branch for reference, *unless*: + # - the current branch is a backport branch + GIB_ARGS+=" -Dgib.referenceBranch=origin/$PULL_REQUEST_BASE -Dgib.disableIfBranchMatches='.*backport.*'" + else + # No PR means the merge target is uncertain so fetch & use main of quarkusio/quarkus, *unless*: + # - the current branch is main or some released branch like 1.10 + # - the current branch is a backport branch targeting some released branch like 1.10 (merge target is not main) + GIB_ARGS+=" -Dgib.referenceBranch=refs/remotes/quarkusio/main -Dgib.fetchReferenceBranch -Dgib.disableIfBranchMatches='main|\d+\.\d+|.*backport.*'" + fi + echo "GIB_ARGS: $GIB_ARGS" + echo "::set-output name=gib_args::${GIB_ARGS}" + outputs: + gib_args: ${{ steps.get-gib-args.outputs.gib_args }} + + linux-jvm-tests: + name: JVM Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 240 + env: + MAVEN_OPTS: -Xmx2048m -XX:MaxMetaspaceSize=1000m + strategy: + fail-fast: false + matrix: + java: + - { + name: "8", + java-version: 8, + maven_args: "-pl !integration-tests/kubernetes/quarkus-standard-way" + } + - { + name: 
"11", + java-version: 11, + maven_args: "" + } + - { + name: "15", + java-version: 15, + maven_args: "-pl !integration-tests/kubernetes/quarkus-standard-way" + } + + steps: + - name: Stop mysql + shell: bash + run: | + ss -ln + sudo service mysql stop || true + + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + + - name: apt clean + shell: bash + run: sudo apt-get clean + + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + release: ${{ matrix.java.release }} + + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + shell: bash + run: mvn $JVM_TEST_MAVEN_OPTS install -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devtools ${{ matrix.java.maven_args }} ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-linux-jvm${{matrix.java.name}} + path: 'test-reports.tgz' + + windows-jdk11-jvm-tests: + name: JVM Tests - JDK 11 Windows + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 180 + env: + MAVEN_OPTS: -Xmx1408m -XX:MaxMetaspaceSize=1000m + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + shell: bash + run: mvn -B --settings .github/mvn-settings.xml -DskipDocs -Dformat.skip -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devtools install ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: | + # Disambiguate windows find from cygwin find + /usr/bin/find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-windows-jdk11-jvm + path: 'test-reports.tgz' + + linux-jvm-maven-tests: + name: Maven Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + java: + - { + name: "11", + java-version: 11 + } + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + - name: Run Maven integration tests + run: mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/maven' ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-linux-maven-java${{matrix.java.name}} + path: 'test-reports.tgz' + + windows-jdk11-jvm-maven-tests: + name: Maven Tests - JDK 11 Windows + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Run Maven integration tests + shell: bash + run: mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/maven' ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-windows-maven-java11 + path: 'test-reports.tgz' + + gradle-tests-jdk11-jvm: + name: Gradle Tests - JDK 11 ${{matrix.os.family}} + runs-on: ${{matrix.os.name}} + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 80 + strategy: + fail-fast: false + matrix: + os: + - { + name: "ubuntu-latest", + family: "Linux" + } + - { + name: "windows-latest", + family: "Windows" + } + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Verify dependencies + # runs on Windows as well but would require newline conversion, not worth it + if: matrix.os.family == 'Linux' + shell: bash + run: | + ./update-dependencies.sh -B --settings ../../.github/mvn-settings.xml + if [ `git status -s -u no '*pom.xml' | wc -l` -ne 0 ] + then + echo -e '\033[0;31mError:\033[0m Dependencies in integration-tests/gradle/pom.xml are outdated!' 1>&2 + echo -e '\033[0;31mError:\033[0m Run update-dependencies.sh in integration-tests/gradle and add the modified pom.xml file to your commit.' 1>&2 + exit 1 + fi + working-directory: integration-tests/gradle + - name: Build with Gradle via Maven + shell: bash + run: mvn $JVM_TEST_MAVEN_OPTS install -pl integration-tests/gradle ${{ needs.build-jdk11.outputs.gib_args }} + + linux-jvm-devtools-tests: + name: Devtools Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + java: + - { + name: "11", + java-version: 11 + } + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + - name: Run Devtools integration tests + run: mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/devtools' ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-linux-devtools-java${{matrix.java.name}} + path: 'test-reports.tgz' + + windows-jdk11-jvm-devtools-tests: + name: Devtools Tests - JDK 11 Windows + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Run Devtools integration tests + shell: bash + run: mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/devtools' ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-windows-devtools-java11 + path: 'test-reports.tgz' + + tcks-test: + name: MicroProfile TCKs Tests + needs: build-jdk11 + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + runs-on: ubuntu-latest + timeout-minutes: 150 + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + path: tcks + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + working-directory: ./tcks + - name: Download RESTEasy Reactive Testsuite + uses: actions/checkout@v2 + with: + repository: quarkusio/resteasy-reactive-testsuite + path: resteasy-reactive-testsuite + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + working-directory: ./tcks + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Run RESTEasy Reactive TCK + # Note: No gib_args here because resteasy-reactive-testsuite is a different repo + run: mvn -B --settings ../tcks/.github/mvn-settings.xml install + working-directory: ./resteasy-reactive-testsuite + - name: Verify with Maven + run: mvn -B --settings .github/mvn-settings.xml -Dtcks -pl tcks -amd install ${{ needs.build-jdk11.outputs.gib_args }} + working-directory: ./tcks + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-tcks + path: 'test-reports.tgz' + + native-tests-read-json-matrix: + name: Native Tests - Read JSON matrix + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + outputs: + matrix: ${{ steps.read.outputs.matrix }} + steps: + - uses: actions/checkout@v2 + - id: read + run: | + json=$(tr -d '\n' < .github/native-tests.json ) + echo $json + echo "::set-output name=matrix::${json}" + + native-tests: + name: Native Tests - ${{matrix.category}} + needs: [build-jdk11, native-tests-read-json-matrix] + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + # Ignore the following YAML Schema error + timeout-minutes: ${{matrix.timeout}} + strategy: + max-parallel: 8 + fail-fast: false + matrix: ${{ fromJson(needs.native-tests-read-json-matrix.outputs.matrix) }} + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + # We do this so we can get better analytics for the downloaded version of the build images + - name: Update Docker Client User Agent + shell: bash + run: | + cat <<< $(jq '.HttpHeaders += {"User-Agent": "Quarkus-CI-Docker-Client"}' ~/.docker/config.json) > ~/.docker/config.json + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + env: + TEST_MODULES: ${{matrix.test-modules}} + CATEGORY: ${{matrix.category}} + run: | + for i in $TEST_MODULES + do modules+="integration-tests/$i,"; done + mvn -pl "${modules}" $NATIVE_TEST_MAVEN_OPTS ${{ needs.build-jdk11.outputs.gib_args }} + # add the 'simple with spaces' project to the run of 'Misc1' by executing it explicitly + # done because there is no good way to pass strings with empty values to the previous command + # so this hack is as good as any + if [ "$CATEGORY" == "Misc1" ]; then + mvn -Dnative -Dquarkus.native.container-build=true -B --settings .github/mvn-settings.xml -pl 'integration-tests/simple with space/' verify ${{ needs.build-jdk11.outputs.gib_args }} + fi + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-type d -name '*-reports' -o -wholename '*/build/reports/tests/functionalTest' -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-native-${{matrix.category}} + path: 'test-reports.tgz' + native-tests-windows: + name: Native Tests - Windows - ${{matrix.category}} + needs: [build-jdk11, native-tests-read-json-matrix] + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + # Ignore the following YAML Schema error + timeout-minutes: ${{matrix.timeout}} + strategy: + max-parallel: 8 + fail-fast: false + # we only run a native build on Windows for Hibernate Validator just to be sure + # that we can build native executables on Windows + # matrix: ${{ fromJson(needs.native-tests-read-json-matrix.outputs.matrix) }} + matrix: + category: [hibernate-validator] + include: + - category: hibernate-validator + timeout: 20 + test-modules: hibernate-validator + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Add quarkusio remote + shell: bash + run: git remote add quarkusio https://github.com/quarkusio/quarkus.git + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Install cl.exe + uses: ilammy/msvc-dev-cmd@v1 + - name: Setup GraalVM + id: setup-graalvm + uses: DeLaGuardo/setup-graalvm@master + with: + graalvm-version: '21.0.0.java11' + - name: Install native-image component + run: | + gu.cmd install native-image + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + # We do this so we can get better analytics for the downloaded version of the build images + - name: Update Docker Client User Agent + shell: bash + run: | + cat <<< $(jq '.HttpHeaders += {"User-Agent": "Quarkus-CI-Docker-Client"}' ~/.docker/config.json) > ~/.docker/config.json + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + shell: bash + env: + TEST_MODULES: ${{matrix.test-modules}} + CATEGORY: ${{matrix.category}} + run: | + for i in $TEST_MODULES + do modules+="integration-tests/$i,"; done + mvn -pl "${modules}" $WINDOWS_NATIVE_TEST_MAVEN_OPTS ${{ needs.build-jdk11.outputs.gib_args }} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-type d -name '*-reports' -o -wholename '*/build/reports/tests/functionalTest' -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-native-windows-${{matrix.category}} + path: 'test-reports.tgz' diff --git a/.github/workflows/ci-actions.yml b/.github/workflows/ci-actions.yml deleted file mode 100644 index f2c9136e6497c..0000000000000 --- a/.github/workflows/ci-actions.yml +++ /dev/null @@ -1,673 +0,0 @@ -name: Quarkus CI - -on: - push: - branches-ignore: - - 'dependabot/**' - paths-ignore: - - 'docs/src/main/asciidoc/**' - - 'README.md' - - 'CONTRIBUTING.md' - - 'ADOPTERS.md' - - 'TROUBLESHOOTING.md' - - 'SECURITY.md' - - 'MAINTAINERS.adoc' - - 'DECISIONS.adoc' - - 'LICENSE.txt' - - 'dco.txt' - - '.github/ISSUE_TEMPLATE/**' - - '.github/autoissuelabeler.yml' - - '.github/boring-cyborg.yml' - - '.github/NativeBuildReport.java' - - '.github/dependabot.yml' - pull_request: - types: [opened, synchronize, reopened, ready_for_review] - paths-ignore: - - 'docs/src/main/asciidoc/**' - - 'README.md' - - 'CONTRIBUTING.md' - - 'ADOPTERS.md' - - 'TROUBLESHOOTING.md' - - 'SECURITY.md' - - 'DECISIONS.adoc' - - 'MAINTAINERS.adoc' - - 'LICENSE.txt' - - 'dco.txt' - - '.github/ISSUE_TEMPLATE/**' - - '.github/autoissuelabeler.yml' - - '.github/boring-cyborg.yml' - - '.github/NativeBuildReport.java' - - '.github/dependabot.yml' - -env: - # Workaround testsuite locale issue - LANG: en_US.UTF-8 - NATIVE_TEST_MAVEN_OPTS: "-B --settings .github/mvn-settings.xml --fail-at-end -Dquarkus.native.container-build=true -Dtest-postgresql -Dtest-elasticsearch -Dtest-keycloak -Dtest-amazon-services -Dtest-db2 -Dtest-mysql -Dtest-mariadb -Dmariadb.base_url='jdbc:mariadb://localhost:3308' -Dmariadb.url='jdbc:mariadb://localhost:3308/hibernate_orm_test' -Dtest-mssql -Dtest-vault -Dtest-neo4j -Dtest-kafka -Dtest-redis -Dnative-image.xmx=5g -Dnative -Dnative.surefire.skip -Dformat.skip -Dno-descriptor-tests install" - JVM_TEST_MAVEN_OPTS: "-e -B --settings .github/mvn-settings.xml -Dtest-postgresql -Dtest-elasticsearch -Dtest-db2 -Dtest-mysql -Dtest-mariadb -Dmariadb.base_url='jdbc:mariadb://localhost:3308' -Dmariadb.url='jdbc:mariadb://localhost:3308/hibernate_orm_test' -Dtest-mssql -Dtest-amazon-services -Dtest-vault -Dtest-neo4j -Dtest-kafka -Dtest-keycloak -Dtest-redis -Dformat.skip" - DB_USER: hibernate_orm_test - DB_PASSWORD: hibernate_orm_test - DB_NAME: hibernate_orm_test -jobs: - build-jdk11: - name: "JDK 11 Build" - runs-on: ubuntu-latest - # Skip draft PRs and those with WIP in the subject, rerun as soon as its removed - if: "github.event_name != 'pull_request' || ( \ - github.event.pull_request.draft == false && \ - github.event.pull_request.state != 'closed' && \ - contains(github.event.pull_request.title, 'wip ') == false && \ - contains(github.event.pull_request.title, '[wip]') == false && \ - ( - github.event.action != 'edited' || \ - contains(github.event.changes.title.from, 'wip ') || \ - contains(github.event.changes.title.from, '[wip]') \ - ) \ - )" - steps: - - uses: actions/checkout@v2 - - uses: n1hility/cancel-previous-runs@v2 - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Reclaim Disk Space - run: .github/ci-prerequisites.sh - - name: Set up JDK 11 - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - with: - java-version: 11 - - name: Compute cache restore key - # Always recompute on a push so that 
the maven repo doesnt grow indefinitely with old versions - run: | - if ${{ github.event_name == 'pull_request' }}; then echo "::set-env name=COMPUTED_RESTORE_KEY::q2maven-"; fi - - name: Cache Maven Repository - id: cache-maven - uses: n1hility/cache@v2 - with: - path: ~/.m2/repository - # Improves the reusability of the cache to limit key changes - key: q2maven-${{ hashFiles('bom/runtime/pom.xml') }} - restore-keys: ${{ env.COMPUTED_RESTORE_KEY }} - restore-only: ${{ github.event_name == 'pull_request' }} - - name: Build - run: | - mvn -e -B -DskipTests -DskipITs -Dno-format -Ddocumentation-pdf clean install - - name: Tar Maven Repo - shell: bash - run: tar -czf maven-repo.tgz -C ~ .m2/repository - - name: Persist Maven Repo - uses: actions/upload-artifact@v1 - with: - name: maven-repo - path: maven-repo.tgz - - linux-jvm-tests: - name: JDK ${{matrix.java.name}} JVM Tests - runs-on: ubuntu-latest - needs: build-jdk11 - timeout-minutes: 180 - env: - MAVEN_OPTS: -Xmx2048m - strategy: - fail-fast: false - matrix: - java : - - { name: Java8, - java-version: 8, - maven_args: "-pl !integration-tests/vault-app,!integration-tests/vault-agroal,!integration-tests/vault,!integration-tests/google-cloud-functions-http,!integration-tests/gradle,!integration-tests/google-cloud-functions,!integration-tests/funqy-google-cloud-functions" - } - - { - name: "Java 8 - 242", - java-version: 8, - release: "jdk8u242-b08", - maven_args: "-pl !integration-tests/google-cloud-functions-http,!integration-tests/gradle,!integration-tests/google-cloud-functions,!integration-tests/funqy-google-cloud-functions" - } - - { - name: Java 11, - java-version: 11, - maven_args: "-pl !integration-tests/gradle" - } - - { - name: Java 14, - java-version: 14, - maven_args: "-pl !integration-tests/gradle" - } - - services: - keycloak: - image: quay.io/keycloak/keycloak:11.0.0 - env: - KEYCLOAK_USER: admin - KEYCLOAK_PASSWORD: admin - JAVA_OPTS: "-server -Xms64m -Xmx512m -XX:MetaspaceSize=96M -XX:MaxMetaspaceSize=256m -Djava.net.preferIPv4Stack=true -Djava.awt.headless=true -Dkeycloak.profile.feature.upload_scripts=enabled" - ports: - - 127.0.0.1:8180:8080 - - 127.0.0.1:8543:8443 - postgres: - image: postgres:10.5 - env: - POSTGRES_USER: hibernate_orm_test - POSTGRES_PASSWORD: hibernate_orm_test - POSTGRES_DB: hibernate_orm_test - ports: - - 127.0.0.1:5432:5432 - mariadb: - image: mariadb:10.4 - env: - MYSQL_USER: hibernate_orm_test - MYSQL_PASSWORD: hibernate_orm_test - MYSQL_DATABASE: hibernate_orm_test - MYSQL_ROOT_PASSWORD: secret - ports: - - 127.0.0.1:3308:3306 - mssql: - image: microsoft/mssql-server-linux:2017-CU13 - env: - ACCEPT_EULA: Y - SA_PASSWORD: ActuallyRequired11Complexity - ports: - - 127.0.0.1:1433:1433 - neo4j: - image: neo4j/neo4j-experimental:4.0.0-rc01 - env: - NEO4J_AUTH: neo4j/secret - NEO4J_dbms_memory_pagecache_size: 10M - NEO4J_dbms_memory_heap_initial__size: 10M - ports: - - 127.0.0.1:7687:7687 - amazonServices: - image: localstack/localstack:0.11.1 - env: - SERVICES: s3,dynamodb,sns,sqs,kms,ses - START_WEB: 0 - ports: - - 127.0.0.1:8000:4569 - - 127.0.0.1:8008:4572 - - 127.0.0.1:8009:4575 - - 127.0.0.1:8010:4576 - - 127.0.0.1:8011:4599 - - 127.0.0.1:8012:4566 - redis: - image: redis:5.0.8-alpine - ports: - - 127.0.0.1:6379:6379 - steps: - - name: Start mysql - shell: bash - run: | - netstat -ln - sudo service mysql stop || true - docker run --rm --publish 127.0.0.1:3306:3306 --name build-mysql -e MYSQL_USER=$DB_USER -e MYSQL_PASSWORD=$DB_PASSWORD -e MYSQL_DATABASE=$DB_NAME -e 
MYSQL_RANDOM_ROOT_PASSWORD=true -e MYSQL_DATABASE=hibernate_orm_test -d mysql:5 --skip-ssl - - name: Start DB2 - # Cannot run as a service becuase the service schema does not support --privileged mode - shell: bash - run: | - docker run --rm --publish 50000:50000 --name build-db2 --privileged=true \ - -e DB2INSTANCE=hreact -e DB2INST1_PASSWORD=hreact -e DBNAME=hreact -e LICENSE=accept -e AUTOCONFIG=false -e ARCHIVE_LOGS=false \ - -d ibmcom/db2:11.5.0.0a - - - uses: actions/checkout@v2 - - - name: apt clean - shell: bash - run: sudo apt-get clean - - - name: Reclaim Disk Space - run: .github/ci-prerequisites.sh - - - name: Set up JDK ${{ matrix.java.name }} - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - with: - java-version: ${{ matrix.java.java-version }} - release: ${{ matrix.java.release }} - - - name: Download Maven Repo - uses: actions/download-artifact@v1 - with: - name: maven-repo - path: . - - name: Extract Maven Repo - shell: bash - run: tar -xzf maven-repo.tgz -C ~ - - name: Build with Maven - run: eval mvn $JVM_TEST_MAVEN_OPTS install ${{ matrix.java.maven_args}} - - name: Prepare failure archive (if maven failed) - if: failure() - shell: bash - run: find . -name '*-reports' -type d | tar -czf test-reports.tgz -T - - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@v1 - if: failure() - with: - name: test-reports-linux-jvm${{matrix.java.name}} - path: 'test-reports.tgz' - - linux-jvm-gradle-tests: - name: JDK ${{matrix.java.name}} JVM Gradle Tests - runs-on: ubuntu-latest - needs: build-jdk11 - timeout-minutes: 60 - strategy: - fail-fast: false - matrix: - java: - - { name: Java8, - java-version: 8 - } - - { - name: "Java 8 - 242", - java-version: 8, - release: "jdk8u242-b08" - } - - { - name: Java 11, - java-version: 11 - } - - { - name: Java 14, - java-version: 14 - } - steps: - - uses: actions/checkout@v2 - - name: Download Maven Repo - uses: actions/download-artifact@v1 - with: - name: maven-repo - path: . - - name: Extract Maven Repo - shell: bash - run: tar -xzf maven-repo.tgz -C ~ - - name: Build with Gradle - uses: eskatos/gradle-command-action@v1 - env: - GRADLE_OPTS: -Xmx1408m - with: - gradle-version: wrapper - wrapper-directory: integration-tests/gradle - build-root-directory: integration-tests/gradle - arguments: clean test -i -S --stacktrace --no-daemon - - windows-jdk11-jvm-tests: - name: Windows JDK 11 JVM Tests - needs: build-jdk11 - runs-on: windows-latest - timeout-minutes: 180 - env: - MAVEN_OPTS: -Xmx1408m - - steps: - - uses: actions/checkout@v2 - - name: Set up JDK 11 - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - with: - java-version: 11 - - name: Download Maven Repo - uses: actions/download-artifact@v1 - with: - name: maven-repo - path: . - - name: Extract Maven Repo - shell: bash - run: tar -xzf maven-repo.tgz -C ~ - - name: Build with Maven - shell: bash - run: mvn -B --settings .github/mvn-settings.xml -Dno-native -Dformat.skip -pl !integration-tests/gradle install - - name: Prepare failure archive (if maven failed) - if: failure() - shell: bash - run: | - # Disambiguate windows find from cygwin find - /usr/bin/find . 
-name '*-reports' -type d | tar -czf test-reports.tgz -T - - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@v1 - if: failure() - with: - name: test-reports-windows-jdk11-jvm - path: 'test-reports.tgz' - - windows-jdk11-jvm-gradle-tests: - name: Windows JDK 11 JVM Gradle Tests - needs: build-jdk11 - runs-on: windows-latest - timeout-minutes: 80 - steps: - - uses: actions/checkout@v2 - - name: Set up JDK 11 - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - with: - java-version: 11 - - name: Download Maven Repo - uses: actions/download-artifact@v1 - with: - name: maven-repo - path: . - - name: Extract Maven Repo - shell: bash - run: tar -xzf maven-repo.tgz -C ~ - - name: Build with Gradle - uses: eskatos/gradle-command-action@v1 - timeout-minutes: 60 - env: - GRADLE_OPTS: -Xmx1408m - with: - gradle-version: wrapper - wrapper-directory: integration-tests/gradle - build-root-directory: integration-tests/gradle - arguments: clean test -i -S --stacktrace --no-daemon - - tcks-test: - name: TCKS Test - needs: build-jdk11 - runs-on: ubuntu-latest - timeout-minutes: 120 - - steps: - - uses: actions/checkout@v2 - - name: Reclaim Disk Space - run: .github/ci-prerequisites.sh - - name: Set up JDK 11 - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - with: - java-version: 11 - - name: Download Maven Repo - uses: actions/download-artifact@v1 - with: - name: maven-repo - path: . - - name: Extract Maven Repo - shell: bash - run: tar -xzf maven-repo.tgz -C ~ - - name: Build with Maven - run: mvn -B --settings .github/mvn-settings.xml -Dno-native -Dno-format -DskipTests -Dtcks install - - name: Verify with Maven - run: mvn -B --settings .github/mvn-settings.xml -f tcks/pom.xml verify - - name: Prepare failure archive (if maven failed) - if: failure() - shell: bash - run: find . 
-name '*-reports' -type d | tar -czf test-reports.tgz -T - - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@v1 - if: failure() - with: - name: test-reports-tcks - path: 'test-reports.tgz' - - native-tests: - name: Native Tests - ${{matrix.category}} - needs: build-jdk11 - runs-on: ubuntu-latest - # Ignore the following YAML Schema error - timeout-minutes: ${{matrix.timeout}} - strategy: - max-parallel: 8 - fail-fast: false - matrix: - category: [Main, Data1, Data2, Data3, Data4, Data5, Data6, Security1, Security2, Security3, Amazon, Messaging, Cache, HTTP, Misc1, Misc2, Misc3, Misc4, Spring, gRPC] - include: - - category: Main - postgres: "true" - timeout: 40 - test-modules: main - - category: Data1 - mariadb: "true" - mssql: "true" - timeout: 65 - test-modules: > - jpa-h2 - jpa-mariadb - jpa-mssql - jpa-derby - jpa-without-entity - hibernate-tenancy - - category: Data2 - db2: "true" - mysql: "true" - mariadb: "true" - timeout: 65 - test-modules: > - jpa - jpa-mysql - jpa-db2 - reactive-mysql-client - reactive-db2-client - hibernate-reactive-db2 - hibernate-reactive-mysql - - category: Data3 - postgres: "true" - timeout: 70 - test-modules: > - flyway - hibernate-orm-panache - hibernate-orm-panache-kotlin - hibernate-orm-envers - liquibase - - category: Data4 - neo4j: "true" - redis: "true" - timeout: 55 - test-modules: > - mongodb-client - mongodb-panache - redis-client - neo4j - hibernate-orm-rest-data-panache - - category: Data5 - postgres: "true" - timeout: 65 - test-modules: > - jpa-postgresql - narayana-stm - narayana-jta - reactive-pg-client - hibernate-reactive-postgresql - - category: Data6 - postgres: "true" - timeout: 40 - test-modules: > - elasticsearch-rest-client - elasticsearch-rest-high-level-client - hibernate-search-elasticsearch - - category: Amazon - amazonServices: "true" - timeout: 45 - test-modules: > - amazon-services - amazon-lambda - amazon-lambda-http - - category: Messaging - timeout: 75 - test-modules: > - artemis-core - artemis-jms - kafka - kafka-streams - reactive-messaging-amqp - - category: Security1 - timeout: 50 - keycloak: "true" - test-modules: > - elytron-security-oauth2 - elytron-security - elytron-security-jdbc - elytron-undertow - elytron-security-ldap - - category: Security2 - timeout: 70 - keycloak: "true" - test-modules: > - elytron-resteasy - oidc - oidc-code-flow - oidc-tenancy - keycloak-authorization - - category: Security3 - timeout: 50 - test-modules: > - vault - vault-app - vault-agroal - - category: Cache - timeout: 55 - test-modules: > - infinispan-cache-jpa - infinispan-client - cache - - category: HTTP - timeout: 60 - test-modules: > - resteasy-jackson - resteasy-mutiny - vertx - vertx-http - vertx-web - vertx-graphql - virtual-http - rest-client - - category: Misc1 - timeout: 60 - test-modules: > - maven - jackson - jsonb - jsch - jgit - quartz - qute - consul-config - - category: Misc2 - timeout: 55 - test-modules: > - tika - hibernate-validator - test-extension - logging-gelf - bootstrap-config - # kubernetes-client alone takes 30mn+ - - category: Misc3 - timeout: 60 - test-modules: > - kubernetes-client - - category: Misc4 - timeout: 30 - test-modules: > - smallrye-graphql - picocli-native - gradle - - category: Spring - timeout: 50 - test-modules: > - spring-di - spring-web - spring-data-jpa - spring-boot-properties - spring-cloud-config-client - - category: gRPC - timeout: 65 - test-modules: > - grpc-health - grpc-interceptors - grpc-mutual-auth - grpc-plain-text - grpc-proto-v2 - grpc-streaming 
- grpc-tls - steps: - # These should be services, but services do not (yet) allow conditional execution - - name: Postgres Service - run: | - docker run --rm --publish 5432:5432 --name build-postgres \ - -e POSTGRES_USER=$DB_USER -e POSTGRES_PASSWORD=$DB_PASSWORD -e POSTGRES_DB=$DB_NAME \ - -d postgres:10.5 - if: matrix.postgres - - name: MySQL Service - run: | - sudo service mysql stop || true - docker run --rm --publish 3306:3306 --name build-mysql \ - -e MYSQL_USER=$DB_USER -e MYSQL_PASSWORD=$DB_PASSWORD -e MYSQL_DATABASE=$DB_NAME -e MYSQL_RANDOM_ROOT_PASSWORD=true \ - -d mysql:5 --skip-ssl - if: matrix.mysql - - name: DB2 Service - run: | - docker run --rm --publish 50000:50000 --name build-db2 --privileged=true \ - -e DB2INSTANCE=hreact -e DB2INST1_PASSWORD=hreact -e DBNAME=hreact -e LICENSE=accept -e AUTOCONFIG=false -e ARCHIVE_LOGS=false \ - -d ibmcom/db2:11.5.0.0a - if: matrix.db2 - - name: Maria DB Service - run: | - docker run --rm --publish 3308:3306 --name build-mariadb \ - -e MYSQL_USER=$DB_USER -e MYSQL_PASSWORD=$DB_PASSWORD -e MYSQL_DATABASE=$DB_NAME -e MYSQL_ROOT_PASSWORD=secret \ - -d mariadb:10.4 - if: matrix.mariadb - - name: MS-SQL Service - run: | - docker run --rm --publish 1433:1433 --name build-mssql \ - -e ACCEPT_EULA=Y -e SA_PASSWORD=ActuallyRequired11Complexity \ - -d microsoft/mssql-server-linux:2017-CU13 - if: matrix.mssql - - name: Amazon Services - run: | - docker run --rm --publish 8000:4569 --publish 8008:4572 --publish 8009:4575 --publish 8010:4576 --publish 8011:4599 --publish 8012:4566 --name build-amazon-service-clients -e SERVICES=s3,dynamodb,sns,sqs,kms,ses -e START_WEB=0 \ - -d localstack/localstack:0.11.1 - if: matrix.amazonServices - - name: Neo4j Service - run: | - docker run --rm --publish 7687:7687 --name build-neo4j \ - -e NEO4J_AUTH=neo4j/secret -e NEO4J_dbms_memory_pagecache_size=10M -e NEO4J_dbms_memory_heap_initial__size=10M \ - -d neo4j/neo4j-experimental:4.0.0-rc01 - if: matrix.neo4j - - name: Redis Service - run: docker run --rm --publish 6379:6379 --name build-redis -d redis:5.0.8-alpine - if: matrix.redis - - name: Keycloak Service - run: | - docker run --rm --publish 8180:8080 --publish 8543:8443 --name build-keycloak \ - -e KEYCLOAK_USER=admin -e KEYCLOAK_PASSWORD=admin -e JAVA_OPTS=" \ - -server -Xms64m -Xmx512m -XX:MetaspaceSize=96M \ - -XX:MaxMetaspaceSize=256m -Djava.net.preferIPv4Stack=true -Djava.awt.headless=true \ - -Dkeycloak.profile.feature.upload_scripts=enabled" \ - -d quay.io/keycloak/keycloak:11.0.0 - if: matrix.keycloak - - uses: actions/checkout@v2 - - name: Set up JDK 11 - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - with: - java-version: 11 - - name: Reclaim Disk Space - run: .github/ci-prerequisites.sh - - name: Download Maven Repo - uses: actions/download-artifact@v1 - with: - name: maven-repo - path: . 
- - name: Extract Maven Repo - shell: bash - run: tar -xzf maven-repo.tgz -C ~ - - name: Build with Maven - env: - TEST_MODULES: ${{matrix.test-modules}} - CATEGORY: ${{matrix.category}} - run: | - for i in $TEST_MODULES - do modules+=("integration-tests/$i"); done - IFS=, - eval mvn -pl "${modules[*]}" $NATIVE_TEST_MAVEN_OPTS - # add the 'simple with spaces' project to the run of 'Misc1' by executing it explicitly - # done because there is no good way to pass strings with empty values to the previous command - # so this hack is as good as any - if [ "$CATEGORY" == "Misc1" ]; then - mvn -Dnative -Dquarkus.native.container-build=true -B --settings .github/mvn-settings.xml -f 'integration-tests/simple with space/' verify - fi - - name: Prepare failure archive (if maven failed) - if: failure() - shell: bash - run: find . -type d -name '*-reports' -o -wholename '*/build/reports/tests/functionalTest' | tar -czf test-reports.tgz -T - - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@v1 - if: failure() - with: - name: test-reports-native-${{matrix.category}} - path: 'test-reports.tgz' diff --git a/.github/workflows/ci-actions.yml.disabled b/.github/workflows/ci-actions.yml.disabled new file mode 100644 index 0000000000000..3b13eb5faa54d --- /dev/null +++ b/.github/workflows/ci-actions.yml.disabled @@ -0,0 +1,587 @@ +name: Quarkus CI + +on: + push: + branches-ignore: + - 'dependabot/**' + # paths-ignore in ci-fork-mvn-cache.yml should match + paths-ignore: + - '.gitignore' + - '.dockerignore' + - '*.md' + - '*.adoc' + - '*.txt' + - 'docs/src/main/asciidoc/**' + - '.github/ISSUE_TEMPLATE/**' + - '.github/*.yml' + - '.github/*.java' + - '.github/*.conf' + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + paths-ignore: + - '.gitignore' + - '.dockerignore' + - '*.md' + - '*.adoc' + - '*.txt' + - 'docs/src/main/asciidoc/**' + - '.github/ISSUE_TEMPLATE/**' + - '.github/*.yml' + - '.github/*.java' + - '.github/*.conf' + +env: + # Workaround testsuite locale issue + LANG: en_US.UTF-8 + NATIVE_TEST_MAVEN_OPTS: "-B --settings .github/mvn-settings.xml --fail-at-end -Dquarkus.native.container-build=true -Dtest-containers -Dstart-containers -Dnative-image.xmx=5g -Dnative -Dnative.surefire.skip -Dformat.skip -Dno-descriptor-tests install -DskipDocs" + JVM_TEST_MAVEN_OPTS: "-e -B --settings .github/mvn-settings.xml -Dtest-containers -Dstart-containers -Dformat.skip -DskipDocs" + DB_USER: hibernate_orm_test + DB_PASSWORD: hibernate_orm_test + DB_NAME: hibernate_orm_test +jobs: + ci-sanity-check: + name: "CI Sanity Check" + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + steps: + - name: Build + run: sleep 30 + build-jdk11: + name: "Initial JDK 11 Build" + runs-on: ubuntu-latest + # Skip main in forks + # Skip draft PRs and those with WIP in the subject, rerun as soon as its removed + if: "(github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')) && ( \ + github.event_name != 'pull_request' || ( \ + github.event.pull_request.draft == false && \ + github.event.pull_request.state != 'closed' && \ + contains(github.event.pull_request.title, 'wip ') == false && \ + contains(github.event.pull_request.title, '[wip]') == false && \ + ( + github.event.action != 'edited' || \ + contains(github.event.changes.title.from, 'wip ') || \ + contains(github.event.changes.title.from, '[wip]') \ + ) \ + ) \ + )" + steps: + - uses: actions/checkout@v2 + - uses: 
n1hility/cancel-previous-runs@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Get Date + id: get-date + run: | + echo "::set-output name=date::$(/bin/date -u "+%Y-%m")" + shell: bash + - name: Cache Maven Repository + id: cache-maven + uses: actions/cache@v2 + with: + path: ~/.m2/repository + # refresh cache every month to avoid unlimited growth + key: q2maven-${{ steps.get-date.outputs.date }} + - name: Build + run: | + mvn -T1C -e -B -DskipTests -DskipITs -Dinvoker.skip -Dno-format -Dtcks --settings .github/mvn-settings.xml clean install + - name: Verify extension dependencies + shell: bash + run: | + ./update-extension-dependencies.sh -B --settings .github/mvn-settings.xml + if [ `git status -s -u no '*pom.xml' | wc -l` -ne 0 ] + then + echo -e '\033[0;31mError:\033[0m Dependencies to extension artifacts are outdated! Run ./update-extension-dependencies.sh and add the modified pom.xml files to your commit.' 1>&2 + exit 1 + fi + - name: Tar Maven Repo + shell: bash + run: tar -I 'pigz -9' -cf maven-repo.tgz -C ~ .m2/repository + - name: Persist Maven Repo + uses: actions/upload-artifact@v1 + with: + name: maven-repo + path: maven-repo.tgz + - name: Delete Local Artifacts From Cache + shell: bash + run: rm -r ~/.m2/repository/io/quarkus + + linux-jvm-tests: + name: JVM Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 240 + env: + MAVEN_OPTS: -Xmx2048m -XX:MaxMetaspaceSize=1000m + strategy: + fail-fast: false + matrix: + java : + - { name: "8", + java-version: 8, + maven_args: "-pl !integration-tests/kubernetes/quarkus-standard-way" + } + - { + name: "11", + java-version: 11, + maven_args: "" + } + - { + name: "15", + java-version: 15, + maven_args: "-pl !integration-tests/kubernetes/quarkus-standard-way" + } + + steps: + - name: Stop mysql + shell: bash + run: | + ss -ln + sudo service mysql stop || true + + - uses: actions/checkout@v2 + + - name: apt clean + shell: bash + run: sudo apt-get clean + + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + release: ${{ matrix.java.release }} + + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + run: eval mvn $JVM_TEST_MAVEN_OPTS install -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devtools ${{ matrix.java.maven_args}} + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-linux-jvm${{matrix.java.name}} + path: 'test-reports.tgz' + + windows-jdk11-jvm-tests: + name: JVM Tests - JDK 11 Windows + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 180 + env: + MAVEN_OPTS: -Xmx1408m -XX:MaxMetaspaceSize=1000m + + steps: + - uses: actions/checkout@v2 + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + shell: bash + run: mvn -B --settings .github/mvn-settings.xml -DskipDocs -Dformat.skip -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devtools install + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: | + # Disambiguate windows find from cygwin find + /usr/bin/find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-windows-jdk11-jvm + path: 'test-reports.tgz' + + linux-jvm-maven-tests: + name: Maven Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + java: + - { + name: "11", + java-version: 11 + } + steps: + - uses: actions/checkout@v2 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + - name: Run Maven integration tests + run: eval mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/maven' + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-linux-maven-java${{matrix.java.name}} + path: 'test-reports.tgz' + + windows-jdk11-jvm-maven-tests: + name: Maven Tests - JDK 11 Windows + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v2 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . 
+ - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Run Maven integration tests + shell: bash + run: mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/maven' + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-windows-maven-java11 + path: 'test-reports.tgz' + + linux-jvm-gradle-tests: + name: Gradle Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + java: + - { + name: "11", + java-version: 11 + } + steps: + - uses: actions/checkout@v2 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + - name: Build with Gradle + uses: eskatos/gradle-command-action@v1 + env: + GRADLE_OPTS: -Xmx1408m + with: + gradle-version: wrapper + wrapper-directory: integration-tests/gradle + build-root-directory: integration-tests/gradle + arguments: clean test -i -S --stacktrace --no-daemon + + windows-jdk11-jvm-gradle-tests: + name: Gradle Tests - JDK 11 Windows + needs: build-jdk11 + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + runs-on: windows-latest + timeout-minutes: 80 + steps: + - uses: actions/checkout@v2 + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Gradle + uses: eskatos/gradle-command-action@v1 + timeout-minutes: 60 + env: + GRADLE_OPTS: -Xmx1408m + with: + gradle-version: wrapper + wrapper-directory: integration-tests/gradle + build-root-directory: integration-tests/gradle + arguments: clean test -i -S --stacktrace --no-daemon + + linux-jvm-devtools-tests: + name: Devtools Tests - JDK ${{matrix.java.name}} + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + java: + - { + name: "11", + java-version: 11 + } + steps: + - uses: actions/checkout@v2 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . 
+ - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK ${{ matrix.java.name }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: ${{ matrix.java.java-version }} + - name: Run Devtools integration tests + run: eval mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/devtools' + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-linux-devtools-java${{matrix.java.name}} + path: 'test-reports.tgz' + + windows-jdk11-jvm-devtools-tests: + name: Devtools Tests - JDK 11 Windows + runs-on: windows-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + needs: build-jdk11 + timeout-minutes: 60 + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v2 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Run Devtools integration tests + shell: bash + run: mvn $JVM_TEST_MAVEN_OPTS install -pl 'integration-tests/devtools' + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-windows-devtools-java11 + path: 'test-reports.tgz' + + tcks-test: + name: MicroProfile TCKs Tests + needs: build-jdk11 + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + runs-on: ubuntu-latest + timeout-minutes: 150 + + steps: + - uses: actions/checkout@v2 + with: + path: tcks + - name: Download RESTEasy Reactive Testsuite + uses: actions/checkout@v2 + with: + repository: quarkusio/resteasy-reactive-testsuite + path: resteasy-reactive-testsuite + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + working-directory: ./tcks + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Run RESTEasy Reactive TCK + run: mvn -B --settings ../tcks/.github/mvn-settings.xml install + working-directory: ./resteasy-reactive-testsuite + - name: Verify with Maven + run: mvn -B --settings .github/mvn-settings.xml -f tcks/pom.xml install + working-directory: ./tcks + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - + - name: Upload failure Archive (if maven failed) + uses: actions/upload-artifact@v1 + if: failure() + with: + name: test-reports-tcks + path: 'test-reports.tgz' + + native-tests-read-json-matrix: + name: Native Tests - Read JSON matrix + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + outputs: + matrix: ${{ steps.read.outputs.matrix }} + steps: + - uses: actions/checkout@v2 + - id: read + run: | + json=$(tr -d '\n' < .github/native-tests.json ) + echo $json + echo "::set-output name=matrix::${json}" + + native-tests: + name: Native Tests - ${{matrix.category}} + needs: [build-jdk11, native-tests-read-json-matrix] + runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + # Ignore the following YAML Schema error + timeout-minutes: ${{matrix.timeout}} + strategy: + max-parallel: 8 + fail-fast: false + matrix: ${{ fromJson(needs.native-tests-read-json-matrix.outputs.matrix) }} + steps: + - uses: actions/checkout@v2 + - name: Set up JDK 11 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 11 + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + # We do this so we can get better analytics for the downloaded version of the build images + - name: Update Docker Client User Agent + shell: bash + run: | + cat <<< $(jq '.HttpHeaders += {"User-Agent": "Quarkus-CI-Docker-Client"}' ~/.docker/config.json) > ~/.docker/config.json + - name: Download Maven Repo + uses: actions/download-artifact@v1 + with: + name: maven-repo + path: . + - name: Extract Maven Repo + shell: bash + run: tar -xzf maven-repo.tgz -C ~ + - name: Build with Maven + env: + TEST_MODULES: ${{matrix.test-modules}} + CATEGORY: ${{matrix.category}} + run: | + for i in $TEST_MODULES + do modules+=("integration-tests/$i"); done + IFS=, + eval mvn -pl "${modules[*]}" $NATIVE_TEST_MAVEN_OPTS + # add the 'simple with spaces' project to the run of 'Misc1' by executing it explicitly + # done because there is no good way to pass strings with empty values to the previous command + # so this hack is as good as any + if [ "$CATEGORY" == "Misc1" ]; then + mvn -Dnative -Dquarkus.native.container-build=true -B --settings .github/mvn-settings.xml -f 'integration-tests/simple with space/' verify + fi + - name: Prepare failure archive (if maven failed) + if: failure() + shell: bash + run: find . 
-type d -name '*-reports' -o -wholename '*/build/reports/tests/functionalTest' -o -name '*.log' | tar -czf test-reports.tgz -T -
+      - name: Upload failure Archive (if maven failed)
+        uses: actions/upload-artifact@v1
+        if: failure()
+        with:
+          name: test-reports-native-${{matrix.category}}
+          path: 'test-reports.tgz'
diff --git a/.github/workflows/ci-fork-mvn-cache.yml b/.github/workflows/ci-fork-mvn-cache.yml
new file mode 100644
index 0000000000000..4ea53d6482213
--- /dev/null
+++ b/.github/workflows/ci-fork-mvn-cache.yml
@@ -0,0 +1,61 @@
+name: Quarkus CI Fork Maven Cache
+
+on:
+  push:
+    branches:
+      - 'main'
+    # paths-ignore should match ci-actions.yml
+    paths-ignore:
+      - '.gitignore'
+      - '.dockerignore'
+      - '*.md'
+      - '*.adoc'
+      - '*.txt'
+      - 'docs/src/main/asciidoc/**'
+      - '.github/ISSUE_TEMPLATE/**'
+      - '.github/*.yml'
+      - '.github/*.java'
+      - '.github/*.conf'
+  schedule:
+    # first day of month at 12:10am
+    - cron: '10 0 1 * *'
+
+env:
+  LANG: en_US.UTF-8
+jobs:
+  build-jdk11:
+    name: "Quick JDK 11 Build"
+    runs-on: ubuntu-latest
+    # Skip in main repo
+    if: github.repository != 'quarkusio/quarkus'
+    steps:
+      - uses: actions/checkout@v2
+      - uses: n1hility/cancel-previous-runs@v2
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          workflow: ci-fork-mvn-cache.yml
+      - name: Reclaim Disk Space
+        run: .github/ci-prerequisites.sh
+      - name: Set up JDK 11
+        # Uses sha for added security since tags can be updated
+        uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52
+        with:
+          java-version: 11
+      - name: Get Date
+        id: get-date
+        run: |
+          echo "::set-output name=date::$(/bin/date -u "+%Y-%m")"
+        shell: bash
+      - name: Cache Maven Repository
+        id: cache-maven
+        uses: actions/cache@v2
+        with:
+          path: ~/.m2/repository
+          # refresh cache every month to avoid unlimited growth
+          key: q2maven-${{ steps.get-date.outputs.date }}
+      - name: Build
+        run: |
+          mvn -T1C -e -B --settings .github/mvn-settings.xml -Dquickly-ci -Dtcks clean install
+      - name: Delete Local Artifacts From Cache
+        shell: bash
+        run: rm -r ~/.m2/repository/io/quarkus
diff --git a/.github/workflows/ci-sanity-check.yml b/.github/workflows/ci-sanity-check.yml
new file mode 100644
index 0000000000000..b5670a797133c
--- /dev/null
+++ b/.github/workflows/ci-sanity-check.yml
@@ -0,0 +1,33 @@
+# Create the CI Sanity Check entry even when we only have ignored files
+name: Quarkus CI Sanity Check
+
+on:
+  push:
+    paths:
+      - '.gitignore'
+      - '.dockerignore'
+      - '*.md'
+      - '*.adoc'
+      - '*.txt'
+      - '.github/ISSUE_TEMPLATE/**'
+      - '.github/*.yml'
+      - '.github/*.java'
+  pull_request:
+    paths:
+      - '.gitignore'
+      - '.dockerignore'
+      - '*.md'
+      - '*.adoc'
+      - '*.txt'
+      - '.github/ISSUE_TEMPLATE/**'
+      - '.github/*.yml'
+      - '.github/*.java'
+
+jobs:
+  ci-sanity-check:
+    name: "CI Sanity Check"
+    runs-on: ubuntu-latest
+    if: github.repository == 'quarkusio/quarkus'
+    steps:
+      - name: Build
+        run: sleep 1
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
new file mode 100644
index 0000000000000..2f95a12fa9821
--- /dev/null
+++ b/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,57 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+name: "CodeQL Security Scan" + +on: + + schedule: + - cron: '0 3 * * 0' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + if: github.repository == 'quarkusio/quarkus' + + strategy: + fail-fast: false + matrix: + # Override automatic language detection by changing the below list + # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] + language: ['java'] + # Learn more... + # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + fetch-depth: 1 + ref: main + - name: Setup Java JDK + uses: actions/setup-java@v1 + with: + java-version: 11 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + + - if: matrix.language == 'java' + name: Build Java + run: mvn -B --settings .github/mvn-settings.xml -Dquickly-ci install + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/deploy-snapshots.yml b/.github/workflows/deploy-snapshots.yml new file mode 100644 index 0000000000000..211226573c11a --- /dev/null +++ b/.github/workflows/deploy-snapshots.yml @@ -0,0 +1,66 @@ +name: Quarkus Deploy Snapshots + +on: + schedule: + - cron: '0 2 * * *' + +env: + LANG: en_US.UTF-8 +jobs: + build-and-deploy: + name: "Build and deploy" + runs-on: ubuntu-latest + if: github.repository == 'quarkusio/quarkus' + steps: + - uses: actions/checkout@v2 + with: + ref: main + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + - name: Set up JDK 8 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 8 + - name: Get Date + id: get-date + run: | + echo "::set-output name=date::$(/bin/date -u "+%Y-%m")" + shell: bash + - name: Cache Maven Repository + id: cache-maven + uses: actions/cache@v2 + with: + path: ~/.m2/repository + # refresh cache every month to avoid unlimited growth + key: q2maven-${{ steps.get-date.outputs.date }} + - name: Build and Deploy + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} + SERVER_USERNAME: ${{ secrets.SERVER_USERNAME }} + SERVER_PASSWORD: ${{ secrets.SERVER_PASSWORD }} + run: | + mvn -e -B --settings .github/mvn-settings.xml \ + -DskipITs -Dno-format -Dinvoker.skip=true \ + -DretryFailedDeploymentCount=10 \ + clean deploy + - name: Delete Local Artifacts From Cache + shell: bash + run: rm -r ~/.m2/repository/io/quarkus + + - name: Report + if: always() + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} + STATUS: ${{ job.status }} + ISSUE_NUMBER: "12111" + run: | + echo "The report step got status: ${STATUS}" + sudo apt-get update -o Dir::Etc::sourcelist="sources.list" \ + -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" + sudo apt-get install -y gnupg2 gnupg-agent + echo Installing SDKMAN + curl -s "https://get.sdkman.io" | 
bash + source ~/.sdkman/bin/sdkman-init.sh && \ + sdk install jbang 0.36.1 + jbang .github/NativeBuildReport.java token="${GITHUB_TOKEN}" status="${STATUS}" issueRepo="${GITHUB_REPOSITORY}" issueNumber="${ISSUE_NUMBER}" thisRepo="${GITHUB_REPOSITORY}" runId="${GITHUB_RUN_ID}" diff --git a/.github/workflows/doc-build.yml b/.github/workflows/doc-build.yml index cf501673c5e2c..4e0705179af53 100644 --- a/.github/workflows/doc-build.yml +++ b/.github/workflows/doc-build.yml @@ -4,15 +4,25 @@ on: push: paths: - 'docs/src/main/asciidoc/**' + - '.github/workflows/doc-build.yml' pull_request: paths: - 'docs/src/main/asciidoc/**' + - '.github/workflows/doc-build.yml' jobs: + ci-sanity-check: + name: "CI Sanity Check" + runs-on: ubuntu-latest + steps: + - name: Build + run: sleep 30 build-doc: name: "Documentation Build" runs-on: ubuntu-latest + # Skip main in forks + if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" steps: - uses: actions/checkout@v2 - uses: n1hility/cancel-previous-runs@v2 @@ -20,25 +30,23 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Set up JDK 11 # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 with: java-version: 11 - - name: Compute cache restore key - # Always recompute on a push so that the maven repo doesnt grow indefinitely with old versions + - name: Get Date + id: get-date run: | - if ${{ github.event_name == 'pull_request' }}; then echo "::set-env name=COMPUTED_RESTORE_KEY::q2maven-"; fi + echo "::set-output name=date::$(/bin/date -u "+%Y-%m")" - name: Cache Maven Repository id: cache-maven - uses: n1hility/cache@v2 + uses: actions/cache@v2 with: path: ~/.m2/repository - # Improves the reusability of the cache to limit key changes - key: q2maven-${{ hashFiles('bom/runtime/pom.xml') }} - restore-keys: ${{ env.COMPUTED_RESTORE_KEY }} - restore-only: ${{ github.event_name == 'pull_request' }} + # refresh cache every month to avoid unlimited growth + key: q2maven-doc-${{ steps.get-date.outputs.date }} - name: Build run: | - mvn -Dquickly + mvn -Dquickly-ci -B --settings .github/mvn-settings.xml install - name: Build Docs run: | - mvn -e -B clean org.asciidoctor:asciidoctor-maven-plugin:process-asciidoc -pl docs -Ddocumentation-pdf \ No newline at end of file + mvn -e -B --settings .github/mvn-settings.xml clean org.asciidoctor:asciidoctor-maven-plugin:process-asciidoc -pl docs -Ddocumentation-pdf diff --git a/.github/workflows/native-cron-build.yml b/.github/workflows/native-cron-build.yml deleted file mode 100644 index aa414f8d2b99a..0000000000000 --- a/.github/workflows/native-cron-build.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: "Quarkus CI - JDK 8 Native Build" -on: - schedule: - - cron: '0 2 * * *' - repository_dispatch: - - -jobs: - build: - if: github.repository == 'quarkusio/quarkus' - runs-on: ubuntu-18.04 - strategy: - matrix: - java: [ 8 ] - name: build-and-testing - steps: - - - name: Stop MySQL - run: sudo systemctl stop mysql - - - name: Pull docker image - run: docker pull quay.io/quarkus/ubi-quarkus-native-image:20.1.0-java${{ matrix.java }} - - - name: Set up JDK ${{ matrix.java }} - # Uses sha for added security since tags can be updated - uses: joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - if: matrix.java != '8' - with: - java-version: ${{ matrix.java }} - - - name: Set up JDK ${{ matrix.java }} - # Uses sha for added security since tags can be updated - uses: 
joschi/setup-jdk@b9cc6eabf7e7e3889766b5cee486f874c9e1bd2d - if: matrix.java == '8' - with: - java-version: ${{ matrix.java }} - # stay on a version before 252 for now because it breaks ALPN handling - release: jdk8u242-b08 - - - name: Checkout Quarkus - uses: actions/checkout@v2 - with: - repository: quarkusio/quarkus - ref: master - - - uses: actions/cache@v1 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-native-cron-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-native-cron- - - - name: Build Quarkus - run: mvn -B install -DskipTests -DskipITs -Dformat.skip - - - name: Run integration tests in native - run: mvn -B --settings .github/mvn-settings.xml verify -f integration-tests/pom.xml --fail-at-end -Dno-format -Ddocker -Dnative -Dquarkus.native.container-build=true -Dquarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-native-image:20.1.0-java${{ matrix.java }} -Dtest-postgresql -Dtest-elasticsearch -Dtest-mysql -Dtest-db2 -Dtest-amazon-services -Dtest-vault -Dtest-neo4j -Dtest-keycloak -Dtest-kafka -Dtest-mssql -Dtest-mariadb -Dmariadb.url="jdbc:mariadb://localhost:3308/hibernate_orm_test" -pl '!io.quarkus:quarkus-integration-test-google-cloud-functions-http,!io.quarkus:quarkus-integration-test-google-cloud-functions,!io.quarkus:quarkus-integration-test-funqy-google-cloud-functions' - - - name: Report - if: always() - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} - STATUS: ${{ job.status }} - JAVA_VERSION: ${{ matrix.java }} - run: | - echo "The report step got status: ${STATUS}" - sudo apt-get update -o Dir::Etc::sourcelist="sources.list" \ - -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" - sudo apt-get install -y gnupg2 gnupg-agent - echo Installing SDKMAN - curl -s "https://get.sdkman.io" | bash - source ~/.sdkman/bin/sdkman-init.sh && \ - sdk install jbang 0.21.0 - [[ ${JAVA_VERSION} = 8 ]] && ISSUE_NUMBER="6717" || ISSUE_NUMBER="6723" - jbang .github/NativeBuildReport.java token="${GITHUB_TOKEN}" status="${STATUS}" issueRepo="${GITHUB_REPOSITORY}" issueNumber="${ISSUE_NUMBER}" thisRepo="${GITHUB_REPOSITORY}" runId="${GITHUB_RUN_ID}" diff --git a/.github/workflows/native-cron-build.yml.disabled b/.github/workflows/native-cron-build.yml.disabled new file mode 100644 index 0000000000000..fc0f0e64187b8 --- /dev/null +++ b/.github/workflows/native-cron-build.yml.disabled @@ -0,0 +1,76 @@ +# Disabled because we mostly don't care about Java 8 native image +name: "Quarkus CI - JDK 8 Native Build" +on: + schedule: + - cron: '0 2 * * *' + repository_dispatch: + + +jobs: + build: + if: github.repository == 'quarkusio/quarkus' + runs-on: ubuntu-18.04 + strategy: + matrix: + java: [ 8 ] + name: build-and-testing + steps: + + - name: Stop MySQL + run: sudo systemctl stop mysql + + - name: Pull docker image + run: docker pull quay.io/quarkus/ubi-quarkus-native-image:20.2.0-java${{ matrix.java }} + + - name: Set up JDK ${{ matrix.java }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + if: matrix.java != '8' + with: + java-version: ${{ matrix.java }} + + - name: Set up JDK ${{ matrix.java }} + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + if: matrix.java == '8' + with: + java-version: ${{ matrix.java }} + # stay on a version before 252 for now because it breaks ALPN handling + release: jdk8u242-b08 + + - name: Checkout Quarkus + uses: actions/checkout@v2 + with: + repository: 
quarkusio/quarkus + ref: main + + - uses: actions/cache@v1 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-native-cron-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-native-cron- + + - name: Build Quarkus + run: mvn -B install -DskipTests -DskipITs -Dformat.skip + + - name: Run integration tests in native + run: mvn -B --settings .github/mvn-settings.xml verify -f integration-tests/pom.xml --fail-at-end -Dno-format -Dtest-containers -Dstart-containers -Dnative -Dquarkus.native.container-build=true -Dquarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-native-image:20.2.0-java${{ matrix.java }} -pl '!io.quarkus:quarkus-integration-test-google-cloud-functions-http,!io.quarkus:quarkus-integration-test-google-cloud-functions,!io.quarkus:quarkus-integration-test-funqy-google-cloud-functions' + + - name: Report + if: always() + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} + STATUS: ${{ job.status }} + JAVA_VERSION: ${{ matrix.java }} + run: | + echo "The report step got status: ${STATUS}" + sudo apt-get update -o Dir::Etc::sourcelist="sources.list" \ + -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" + sudo apt-get install -y gnupg2 gnupg-agent + echo Installing SDKMAN + curl -s "https://get.sdkman.io" | bash + source ~/.sdkman/bin/sdkman-init.sh && \ + sdk install jbang 0.21.0 + [[ ${JAVA_VERSION} = 8 ]] && ISSUE_NUMBER="6717" || ISSUE_NUMBER="6723" + jbang .github/NativeBuildReport.java token="${GITHUB_TOKEN}" status="${STATUS}" issueRepo="${GITHUB_REPOSITORY}" issueNumber="${ISSUE_NUMBER}" thisRepo="${GITHUB_REPOSITORY}" runId="${GITHUB_RUN_ID}" diff --git a/.github/workflows/quarkus-bot-lint.yml b/.github/workflows/quarkus-bot-lint.yml new file mode 100644 index 0000000000000..83689a18f5dbe --- /dev/null +++ b/.github/workflows/quarkus-bot-lint.yml @@ -0,0 +1,22 @@ +name: quarkus-bot.yml lint + +on: + push: + paths: + - '.github/quarkus-bot.yml' + pull_request: + paths: + - '.github/quarkus-bot.yml' + +jobs: + lint: + name: "quarkus-bot.yml validation" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: install yamllint + shell: bash + run: sudo apt-get install -y yamllint + - name: run yamllint for bot + shell: bash + run: yamllint -c .github/yamllint.conf .github/quarkus-bot.yml diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml new file mode 100644 index 0000000000000..78c1857f597e4 --- /dev/null +++ b/.github/workflows/release-build.yml @@ -0,0 +1,48 @@ +name: Quarkus Release Test Build + +on: + schedule: + - cron: '0 3 * * *' +env: + LANG: en_US.UTF-8 +jobs: + build: + name: "Prepare release" + runs-on: ubuntu-latest + if: github.repository == 'quarkusio/quarkus' + steps: + - uses: actions/checkout@v2 + with: + ref: main + - name: Reclaim Disk Space + run: .github/ci-prerequisites.sh + - name: Set up JDK 8 + # Uses sha for added security since tags can be updated + uses: joschi/setup-jdk@e87a7cec853d2dd7066adf837fe12bf0f3d45e52 + with: + java-version: 8 + - name: Create maven repo + run: mkdir -p $HOME/release/repository + - name: Build and Test + run: | + mvn --settings .github/mvn-settings.xml \ + -B \ + -DskipITs -Dinvoker.skip=true \ + -Dmaven.repo.local=$HOME/release/repository \ + clean install + - name: Report + if: always() + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} + STATUS: ${{ job.status }} + ISSUE_NUMBER: "13058" + run: | + echo "The report step got status: ${STATUS}" + sudo apt-get update -o Dir::Etc::sourcelist="sources.list" \ + -o 
Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" + sudo apt-get install -y gnupg2 gnupg-agent + echo Installing SDKMAN + curl -s "https://get.sdkman.io" | bash + source ~/.sdkman/bin/sdkman-init.sh && \ + sdk install jbang 0.52.1 + jbang .github/NativeBuildReport.java token="${GITHUB_TOKEN}" status="${STATUS}" issueRepo="${GITHUB_REPOSITORY}" issueNumber="${ISSUE_NUMBER}" thisRepo="${GITHUB_REPOSITORY}" runId="${GITHUB_RUN_ID}" diff --git a/.github/workflows/sonarcloud.yml.disabled b/.github/workflows/sonarcloud.yml.disabled new file mode 100644 index 0000000000000..aa0856ef4097d --- /dev/null +++ b/.github/workflows/sonarcloud.yml.disabled @@ -0,0 +1,39 @@ +name: Sonarcloud Analysis +#on: +# push: +# branches: +# - main +# pull_request: +# types: [opened, synchronize, reopened] +jobs: + build: + name: Build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + - name: Get Date + id: get-date + run: | + echo "::set-output name=date::$(/bin/date -u "+%Y-%m")" + shell: bash + - name: Cache SonarCloud packages + uses: actions/cache@v2 + with: + path: ~/.sonar/cache + key: ${{ runner.os }}-sonar-${{ steps.get-date.outputs.date }} + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ steps.get-date.outputs.date }} + - name: Build and analyze + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_API_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + run: ./mvnw -B -Dquickly-ci install org.sonarsource.scanner.maven:sonar-maven-plugin:sonar diff --git a/.github/yamllint.conf b/.github/yamllint.conf new file mode 100644 index 0000000000000..1b6be046eb9a1 --- /dev/null +++ b/.github/yamllint.conf @@ -0,0 +1,5 @@ +extends: default + +rules: + line-length: disable + commas: disable diff --git a/.gitignore b/.gitignore index 595df374f0f03..d5e7198cd98ea 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,6 @@ graph-output.dot *.swo ObjectStore .gradle -build docker/distroless/bazel-* /.apt_generated_tests/ quarkus.log @@ -35,3 +34,5 @@ replay_*.logß nbactions.xml nb-configuration.xml .cache +/lsp/ +.jbang diff --git a/.mvn/maven.config b/.mvn/maven.config new file mode 100644 index 0000000000000..4230c24172f9b --- /dev/null +++ b/.mvn/maven.config @@ -0,0 +1,3 @@ +-Dmaven.wagon.httpconnectionManager.ttlSeconds=120 +-Dmaven.wagon.http.retryHandler.requestSentEnabled=true +-Dmaven.wagon.http.retryHandler.count=10 diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java index b901097f2db6e..e76d1f3241d38 100644 --- a/.mvn/wrapper/MavenWrapperDownloader.java +++ b/.mvn/wrapper/MavenWrapperDownloader.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/ADOPTERS.md b/ADOPTERS.md index ec1a7fc248f11..b25f054f1ef51 100644 --- a/ADOPTERS.md +++ b/ADOPTERS.md @@ -13,6 +13,9 @@ If any organization would like get added or removed please make a pull request b | Organization | Reference | |-----------------------|----------------------------------------------------------------------------------| |Artes | https://horvie.github.io/adopt-quarkus/ | +|B<>com | https://5g.labs.b-com.com/ | +|Carrefour | https://horizons.carrefour.com/efficient-java-in-the-cloud-with-quarkus | +|Cytech | https://quarkus.io/blog/cytech-customer-story/ | |Ennovative Solutions | https://quarkus.io/blog/ennovativesolutions-uses-quarkus-with-aws-lambda/ | |GoWithFlow | https://quarkus.io/blog/gowithflow-chooses-quarkus-to-deliver-fast-to-production/| |Lufthansa Technik | https://quarkus.io/blog/aviatar-experiences-significant-savings/ | @@ -20,4 +23,6 @@ If any organization would like get added or removed please make a pull request b |Sedona | https://quarkus.io/blog/sedona-rewrites-insurance-premium/ | |Suomen Asiakastieto Oy | https://quarkus.io/blog/asiakastieto-chooses-quarkus-for-microservices/ | |Talkdesk | https://quarkus.io/blog/talkdesk-chooses-quarkus-for-fast-innovation/ | +|UTN Faculty Córdoba | https://www.frc.utn.edu.ar/computos/tech/ | |Vodafone Greece | https://quarkus.io/blog/vodafone-greece-replaces-spring-boot/ | +|Wipro | https://quarkus.io/blog/wipro-customer-story/ | diff --git a/COMMITTERS.adoc b/COMMITTERS.adoc new file mode 100644 index 0000000000000..bfd6edef4d9b1 --- /dev/null +++ b/COMMITTERS.adoc @@ -0,0 +1,245 @@ += Information for Quarkus Committers + +This document contains useful information for Quarkus committers. + +[NOTE] +==== +It is a work in progress, so if you think information is missing/unclear/inadequate, +please reach out to us on the `quarkus-dev` mailing-list. +==== + +This document has a lot of information. +It is not a set of strict rules, it is here to guide you. +If you forget to do something, miss something, don't worry, it happens. + +Do your best and that will already be a lot :). + +== Merging Pull Requests + +The first thing is that, while being a committer, +you can't really commit directly to the main repository: +you need at least one review by your peers. + +Thus the need for a pull request. + +So you are more like a merger than a committer +and you might merge more PRs from others than your own work. + +While not absolute, here is some advice: + +* If you really want a review by a specific person, make it a named reviewer via GitHub's "Reviewers" UI +* If someone asked for a specific reviewer, better give some time for this person to review the PR. + If not reviewed in a reasonable amount of time, better ping them again to have some information + or ask the original author if we should go ahead. +* If in doubt (on a specific part or because you're not comfortable reviewing documentation or any other reason), + ask for help. +* Except if it's a total no-brainer (typically a typo fix), let the PR bake a few hours so some other people can have a + look if interested. + If the PR is very large or is a new extension, better let it + bake for a few days. 
+* We do merge commits so please ask the author to properly squash the commits before merging.
+  The idea is to have proper semantic commits.
+  If the author is not familiar with Git or not available, you can also (carefully)
+  https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/committing-changes-to-a-pull-request-branch-created-from-a-fork[do it for them]
+  as long as you both agree on it.
+* Make sure the commit comments are meaningful.
+* As a new committer, you can start by giving your approval and not merging and see how things evolve.
+  This should give you some hints of important things to check in the future.
+* Only merge pull requests that target main.
+  **Dealing with old branches is a separate process.**
+* If you are sure a PR should go in and is just waiting for CI,
+  just use the `triage/waiting-for-ci` label.
+  It is a good hint for other committers.
+* If you think a pull request is an essential part of a given milestone (and can realistically be a part of it),
+  you should assign this milestone to it so that we can be sure it stays on the radar.
+* If you think a pull request is worth mentioning in the release notes and/or
+  the announcement blog post, add the `release/noteworthy-feature` label.
+* Do NOT assign a PR to an old branch; see the <<backporting-process>> section for more details.
+
+Obviously, each situation is different so use your own judgement,
+and if in doubt, just ask for advice; the other committers are here to help.
+
+[[release-schedule]]
+== Release Schedule
+
+The release schedule is posted here: https://github.com/quarkusio/quarkus/wiki/Release-Planning .
+
+There are a few things you can do to help us be on schedule:
+
+* Anything that goes in a release should have been **merged** the day before the release.
+  That means:
++
+  ** Your PR needs to be created, obviously.
+  ** It should be reviewed by someone familiar with this area.
+  ** You have to take this feedback into account.
+  ** And once approved, the PR can be merged.
+  ** Also, keep in mind that we have a 3-hour CI cycle and CI needs to be green before any inclusion.
++
+In short, posting a large PR the day before the release won't work.
+
+* In the case of a significant change, the release considered for inclusion shouldn't be
+  the `.Final` but the `.CR1`.
+  Thus, your PR has to be merged the day before the release of `.CR1`.
+
+* If, for some reason, a PR really needs to be part of a given release, better coordinate with
+  the release manager and the potential reviewers so that everyone is aware something big is coming.
+  Potential reviewers might also have their own agenda and work to be included in the release,
+  thus not always having the cycles to review last minute.
+
+== Dependency Upgrades
+
+Quarkus depends on a lot of third-party dependencies that we try to keep up to date.
+
+We partially rely on Dependabot for https://github.com/quarkusio/quarkus/blob/main/.github/dependabot.yml[a curated list of dependencies].
+
+Some dependencies are critical for Quarkus and quite complex.
+Good examples would be Vert.x or Hibernate ORM.
+
+It is good practice to update these core dependencies at the beginning of a given
+release cycle to give them time to bake in the main branch before being released.
+
+== Backward Compatibility
+
+Quarkus evolves very fast and we decided we could break backward compatibility as long as:
+
+* It is worth it.
+* We try to support the previous behavior for a while - if it doesn't have a prohibitive cost, + be it in terms of development time or added complexity. + +Usually, it is a good idea to have the opinion of people involved in the development of this area. +And for widely used extensions or core changes, sending an email on the `quarkus-dev` mailing list +should be considered. + +Breaking changes should be marked with the `release/breaking-change` label and, +if they need to be included in the release notes and/or announcement blog post, +they should also be labelled with `release/noteworthy-feature`. + +They must be documented in the migration guide of the appropriate version: +https://github.com/quarkusio/quarkus/wiki/Migration-Guides. + +== Pull Request Title + +The title should be in English, should not contain an issue number. +It should also not contain ellipsis. + +If your commit message was too long and GitHub automatically cut +the title, it is helpful if you can take the time to move the cut +part where it belongs to have a full title. + +Titles are included in the Release notes so they are important. + +A good title would look like: `Fix off by one issue in Quartz extension` or +`Introduce Hibernate Reactive extension`. + +A bad title would look like: `fix(#444)`. + +== Issues Fixed + +When a PR fixes some issues, it's good practice to add it in the description (and not in the title!). + +One issue per line with something like: + +[source,asciidoc] +---- +* Fix #444 +* Fix #555 +---- + +Given GitHub automatically extracts the commit information to fill in the PR fields, +just make your commit comment look like: + +[source] +---- +Fix off by one issue in Quartz extension + +* Fix #444 +* Fix #555 +---- + +[TIP] +==== +GitHub supports a variety of keywords here: `fix`, `fixes`, `fixed`, +`resolve`, `resolves`, `resolved`, `close`, `closes`, `closed` +all do the same thing. +==== + +[WARNING] +==== +GitHub won't detect issues properly if you do something like +`Fix #444 #555`. +==== + +== Affecting Labels and Milestones + +Affecting labels and milestones is very important in our process. + +Before each release, we check that we don't have any +closed issues that don't have either a milestone affected or +some of the "excluding" labels. + +Thus: + +* If you close a pull request because the committers have decided to not merge it, + please add the appropriate `triage/` label: `triage/invalid`, `triage/out-of-date`, + `triage/wontfix` are usually in order. +* If you close an issue because it has been fixed, either add the milestone + if you can find it easily or use the `triage/out-of-date` if you can't. +* If you close an issue for any other reason, one of the aforementioned `triage/` labels + is probably adequate. +* Some issues are created as `kind/bug` but are more support questions: + in this case, remove the `kind/bug` label and add the `kind/question` label. + +[[backporting-process]] +== Backporting Process + +When we release a new version of Quarkus, we usually do a bugfix +release a couple of weeks after. + +Every time we do a major release (e.g. `1.7.0.Final`), we create a release branch (e.g. `1.7`) to host +the commits for these bugfix releases. + +All the pull requests are merged in the `main` branch so they are applied to the new feature +release of Quarkus. +They won't be integrated in the previous version branch. + +Some pull requests however may qualify for being backported to this +bugfix branch. 
+ +Good examples of that might be: + +* bugfixes +* documentation fixes +* usability fixes + +Obviously, the barrier is higher for large pull requests as +they might be more risky to backport. +But sometimes, we just have to backport them anyway. + +If you think your pull request or the pull request you are reviewing, might be a good backport candidate, +please add the `triage/backport?` label. + +The question mark is important: +it is not automatic and we carefully review each pull request before backporting. + +Thus, if not entirely clear, don't hesitate to add a comment to the pull request +when adding the label. + +And don't be surprised if we come to you with some questions about it +when we prepare the next bugfix release. + +== Good First Issues + +We need to find the right balance between fixing the issue right away +and trying to onboard new contributors. + +It's not always easy to find one, but if you think the issue is appropriate, +affecting it the `good first issue` label for some time might be a good thing. + +Obviously, critical bugs are not good candidates :). + +== I Did Something Wrong, What Should I Do? + +Take a deep breath and don't worry, it happens. + +Just ping `@quarkusio/committerhelp` on GitHub or `@committerhelp` on Zulip +and we will find a solution. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 742c86f6bae01..5b74c54ae1dcd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,6 +5,42 @@ We try to make it easy, and all contributions, even the smaller ones, are more t This includes bug reports, fixes, documentation, examples... But first, read this page (including the small print at the end). +* [Legal](#legal) +* [Reporting an issue](#reporting-an-issue) +* [Checking an issue is fixed in main](#checking-an-issue-is-fixed-in-main) + + [Using snapshots](#using-snapshots) + + [Building main](#building-main) + + [Updating the version](#updating-the-version) +* [Before you contribute](#before-you-contribute) + + [Code reviews](#code-reviews) + + [Coding Guidelines](#coding-guidelines) + + [Continuous Integration](#continuous-integration) + + [Tests and documentation are not optional](#tests-and-documentation-are-not-optional) +* [Setup](#setup) + + [IDE Config and Code Style](#ide-config-and-code-style) + - [Eclipse Setup](#eclipse-setup) + - [IDEA Setup](#idea-setup) +* [Build](#build) + + [Workflow tips](#workflow-tips) + - [Building all modules of an extension](#building-all-modules-of-an-extension) + - [Building a single module of an extension](#building-a-single-module-of-an-extension) + - [Running a single test](#running-a-single-test) + - [Automatic incremental build](#automatic-incremental-build) + * [Special case `bom-descriptor-json`](#special-case--bom-descriptor-json-) + * [Usage by CI](#usage-by-ci) +* [Usage](#usage) + - [With Maven](#with-maven) + - [With Gradle](#with-gradle) + + [MicroProfile TCK's](#microprofile-tck-s) + + [Test Coverage](#test-coverage) +* [Extensions](#extensions) + + [Descriptions](#descriptions) + + [Update dependencies to extensions](#update-dependencies-to-extensions) +* [The small print](#the-small-print) +* [Frequently Asked Questions](#frequently-asked-questions) + +Table of contents generated with markdown-toc + ## Legal All original contributions to Quarkus are licensed under the @@ -22,15 +58,15 @@ This project uses GitHub issues to manage the issues. 
Open an issue directly in If you believe you found a bug, and it's likely possible, please indicate a way to reproduce it, what you are seeing and what you would expect to see. Don't forget to indicate your Quarkus, Java, Maven/Gradle and GraalVM version. -## Checking an issue is fixed in master +## Checking an issue is fixed in main -Sometimes a bug has been fixed in the `master` branch of Quarkus and you want to confirm it is fixed for your own application. -Testing the `master` branch is easy and you have two options: +Sometimes a bug has been fixed in the `main` branch of Quarkus and you want to confirm it is fixed for your own application. +Testing the `main` branch is easy and you have two options: * either use the snapshots we publish daily on https://oss.sonatype.org/content/repositories/snapshots/ * or build Quarkus all by yourself -This is a quick summary to get you to quickly test master. +This is a quick summary to get you to quickly test main. If you are interested in having more details, refer to the [Build section](#build) and the [Usage section](#usage). ### Using snapshots @@ -41,7 +77,7 @@ Then just add https://oss.sonatype.org/content/repositories/snapshots/ as a Mave You can check the last publication date here: https://oss.sonatype.org/content/repositories/snapshots/io/quarkus/ . -### Building master +### Building main Just do the following: @@ -56,7 +92,7 @@ Wait for a bit and you're done. ### Updating the version -Be careful, when using the `master` branch, you need to use the `quarkus-bom` instead of the `quarkus-universe-bom`. +Be careful, when using the `main` branch, you need to use the `quarkus-bom` instead of the `quarkus-universe-bom`. Update both the versions of the `quarkus-bom` and the Quarkus Maven plugin to `999-SNAPSHOT`. @@ -66,6 +102,17 @@ You can now test your application. To contribute, use GitHub Pull Requests, from your **own** fork. +Also, make sure you have set up your Git authorship correctly: + +``` +git config --global user.name "Your Full Name" +git config --global user.email your.email@example.com +``` + +If you use different computers to contribute, please make sure the name is the same on all your computers. + +We use this information to acknowledge your contributions in release announcements. + ### Code reviews All submissions, including submissions by project members, need to be reviewed before being merged. @@ -82,6 +129,8 @@ Because we are all humans, and to ensure Quarkus is stable for everyone, all cha The process requires only one additional step to enable Actions on your fork (clicking the green button in the actions tab). [See the full video walkthrough](https://youtu.be/egqbx-Q-Cbg) for more details on how to do this. +To keep the caching of non-Quarkus artifacts efficient (speeding up CI), you should occasionally sync the `main` branch of your fork with `main` of this repo (e.g. monthly). + ### Tests and documentation are not optional Don't forget to include tests in your pull requests. @@ -97,7 +146,7 @@ Be sure to test your pull request in: If you have not done so on this machine, you need to: * Install Git and configure your GitHub access -* Install Java SDK (OpenJDK recommended) +* Install Java SDK 8 or 11+ (OpenJDK recommended) * Install [GraalVM](https://quarkus.io/guides/building-native-image) * Install platform C developer tools: * Linux @@ -132,7 +181,34 @@ Next navigate to _Java_ -> _Code Style_ -> _Organize Imports_. 
Click _Import_ an #### IDEA Setup -Open the _Preferences_ window (or _Settings_ depending on your edition) , navigate to _Plugins_ and install the [Eclipse Code Formatter Plugin](https://plugins.jetbrains.com/plugin/6546-eclipse-code-formatter) from the Marketplace. +##### How to work + +Quarkus is a large project and IDEA will have a hard time compiling the whole of it. +Before you start coding, make sure to build the project using Maven from the commandline +with `./mvnw -Dquickly`. + +##### `OutOfMemoryError` while importing + +After creating an IDEA project, the first import will most likely fail with an `OutOfMemoryError`. + +To fix that, open the _Preferences_ window (or _Settings_ depending on your edition), +then navigate to _Build, Execution, Deployment_ > _Build Tools_ > _Maven_ > _Importing_. +In _VM options for importer_, raise the heap to at least 2 GB, e.g. `-Xmx2g -Xms2g`. + +##### `package sun.misc does not exist` while building + +You may get an error like this during the build: + +``` +Error:(46, 56) java: package sun.misc does not exist +``` + +To fix this go to _Settings_ > _Build, Execution, Deployment_ > _Compiler_ > _Java Compiler_ +and disable _Use '--release' option for cross compilation (java 9 and later)_. + +##### Formatting + +Open the _Preferences_ window (or _Settings_ depending on your edition), navigate to _Plugins_ and install the [Eclipse Code Formatter Plugin](https://plugins.jetbrains.com/plugin/6546-eclipse-code-formatter) from the Marketplace. Restart your IDE, open the *Preferences* (or *Settings*) window again and navigate to _Other Settings_ -> _Eclipse Code Formatter_. @@ -140,6 +216,10 @@ Select _Use the Eclipse Code Formatter_, then change the _Eclipse Java Formatter `eclipse-format.xml` file in the `independent-projects/ide-config` directory. Make sure the _Optimize Imports_ box is ticked, and select the `eclipse.importorder` file as the import order config file. +Next, disable wildcard imports: +navigate to _Editor_ -> _Code Style_ -> _Java_ -> _Imports_ +and set _Class count to use import with '\*'_ to `999`. +Do the same with _Names count to use static import with '\*'_. ## Build @@ -202,13 +282,50 @@ In this command we use the groupId and artifactId of the module to identify it. #### Running a single test -Often you need to run a single test from some Maven module. Say for example you want to run the `GreetingResourceTest` of the `resteasy-jackson` Quarkus integration test (which can be found [here](https://github.com/quarkusio/quarkus/blob/master/integration-tests/resteasy-jackson)). +Often you need to run a single test from some Maven module. Say for example you want to run the `GreetingResourceTest` of the `resteasy-jackson` Quarkus integration test (which can be found [here](https://github.com/quarkusio/quarkus/blob/main/integration-tests/resteasy-jackson)). One way to accomplish this is by executing the following command: ``` ./mvnw test -f integration-tests/resteasy-jackson/ -Dtest=GreetingResourceTest ``` +#### Automatic incremental build + +:information_source: This feature is currently in testing mode. You're invited to give it a go and please reach out via [Zulip](https://quarkusio.zulipchat.com/#narrow/stream/187038-dev) or GitHub in case something doesn't work as expected or you have ideas to improve things. 
+ +Instead of _manually_ specifying the modules to build as in the previous examples, you can tell [gitflow-incremental-builder (GIB)](https://github.com/gitflow-incremental-builder/gitflow-incremental-builder) to only build the modules that have been changed or depend on modules that have been changed (downstream). +E.g.: +``` +./mvnw install -Dincremental +``` +This will build all modules (and their downstream modules) that have been changed compared to your _local_ `main`, including untracked and uncommitted changes. + +If you just want to build the changes since the last commit on the current branch, you can switch off the branch comparison via `-Dgib.disableBranchComparison` (or short: `-Dgib.dbc`). + +There are many more configuration options in GIB you can use to customize its behaviour: https://github.com/gitflow-incremental-builder/gitflow-incremental-builder#configuration + +Parallel builds (`-T...`) should work without problems but parallel test execution is not yet supported (in general, not a GIB limitation). + +##### Special case `bom-descriptor-json` + +Without going too much into details (`devtools/bom-descriptor-json/pom.xml` has more info), you should build this module _without_ `-Dincremental` _if you changed any extension "metadata"_: + +* Addition/renaming/removal of an extension +* Any other changes to any `quarkus-extension.yaml` + +##### Usage by CI + +The GitHub Actions based Quarkus CI is using GIB to reduce the average build time of pull request builds and builds of branches in your fork. + +CI is using a slighty different GIB config than locally: + +* [Special handling of "Explicitly selected projects"](https://github.com/gitflow-incremental-builder/gitflow-incremental-builder#explicitly-selected-projects) is deactivated +* Untracked/uncommitted changes are not considered +* Branch comparison is more complex due to distributed GitHub forks +* Certain "critical" branches like `main` are not built incrementally + +For more details see the `Get GIB arguments` step in `.github/workflows/ci-actions-incremental.yml`. + ## Usage After the build was successful, the artifacts are available in your local Maven repository. @@ -248,7 +365,6 @@ quarkusPlatformGroupId=io.quarkus pluginManagement { repositories { mavenLocal() // add mavenLocal() to first position - jcenter() mavenCentral() gradlePluginPortal() } @@ -263,7 +379,6 @@ pluginManagement { ``` repositories { mavenLocal() // add mavenLocal() to first position - jcenter() mavenCentral() } ``` @@ -292,6 +407,8 @@ then change into the `coverage-report` directory and run `mvn package`. The code This currently does not work on Windows as it uses a shell script to copy all the classes and files into the code coverage module. +If you just need a report for a single module, run `mvn install jacoco:report -Ptest-coverage` in that module (or with `-f ...`). + ## Extensions ### Descriptions @@ -317,6 +434,14 @@ Bad examples and the corresponding good example: - "PostgreSQL database connector" (use "Connect to the PostgreSQL database via JDBC") - "Asynchronous messaging for Reactive Streams" (use "Produce and consume messages and implement event driven and data streaming applications") +### Update dependencies to extensions + +When adding a new extension you should run `update-extension-dependencies.sh` so that special modules like `devtools/bom-descriptor-json` +that are consuming this extension are built *after* the respective extension. Simply add to your commit the files that were changed by the script. 
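A minimal sketch of that workflow, assuming the script is run from the repository root and that you fold its output into the same commit as the new extension; the exact files it touches depend on the extension you added, and the `git` commands below are only one way to stage them:

```
# after wiring the new extension modules into the build
./update-extension-dependencies.sh

# inspect what the script regenerated (typically devtools/bom-descriptor-json and similar aggregator modules)
git status --short

# stage only the tracked pom.xml files it modified and fold them into your extension commit
git add -u -- '*pom.xml'
git commit --amend --no-edit
```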
+
+When removing an extension make sure to also remove all dependencies to it from all `pom.xml`.
+It's easy to miss this as long as the extension artifact is still present in your local Maven repository.
+
 ## The small print

 This project is an open source project, please act responsibly, be nice, polite and enjoy!
@@ -325,29 +450,43 @@ This project is an open source project, please act responsibly, be nice, polite

 * The Maven build fails with `OutOfMemoryException`

-Set Maven options to use 1.5GB of heap: `export MAVEN_OPTS="-Xmx1563m"`.
+  Set Maven options to use 1.5GB of heap: `export MAVEN_OPTS="-Xmx1563m"`.

 * IntelliJ fails to import Quarkus Maven project with `java.lang.OutOfMemoryError: GC overhead limit exceeded`

-In IntelliJ IDEA (version older than `2019.2`) if you see problems in the Maven view claiming `java.lang.OutOfMemoryError: GC overhead limit exceeded` that means the project import failed.
-
-To fix the issue, you need to update the Maven importing settings:
-`Build, Execution, Deployment` > `Build Tools`> `Maven` > `Importing` > `VM options for importer`
-To import Quarkus you need to define the JVM Max Heap Size (E.g. `-Xmx1g`)
+  In IntelliJ IDEA (version older than `2019.2`) if you see problems in the Maven view claiming `java.lang.OutOfMemoryError: GC overhead limit exceeded` that means the project import failed.

-**Note** As for now, we can't provide a unique Max Heap Size value. We have been reported to require from 768M to more than 3G to import Quarkus properly.
+  To fix the issue, you need to update the Maven importing settings:
+  `Build, Execution, Deployment` > `Build Tools` > `Maven` > `Importing` > `VM options for importer`
+  To import Quarkus you need to define the JVM Max Heap Size (E.g. `-Xmx1g`)

-* Build hangs with DevMojoIT running infinitely
-```
-./mvnw clean install
-# Wait...
-[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.192 s - in io.quarkus.maven.it.GenerateConfigIT
-[INFO] Running io.quarkus.maven.it.DevMojoIT
+  **Note** As for now, we can't provide a unique Max Heap Size value. We have been reported to require from 768M to more than 3G to import Quarkus properly.

-```
-DevMojoIT require a few minutes to run but anything more than that is not expected. Make sure that nothing is running on 8080.
+* Build hangs with DevMojoIT running infinitely
+  ```
+  ./mvnw clean install
+  # Wait...
+  [INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.192 s - in io.quarkus.maven.it.GenerateConfigIT
+  [INFO] Running io.quarkus.maven.it.DevMojoIT
+  ```
+  DevMojoIT requires a few minutes to run but anything more than that is not expected. Make sure that nothing is running on port 8080.

 * The native integration test for my extension didn't run in the CI

-In the interest of speeding up CI, the native build stage `run_native_tests_stage` have been split into multiple steps.
-This means that each new extension needs to be configured explicitly in `azure-pipelines.yml` to have it's integration tests run in native mode
+  In the interest of speeding up CI, the native build job `native-tests` has been split into multiple categories which are run in parallel.
+  This means that each new extension needs to be configured explicitly in [`native-tests.json`](.github/native-tests.json) to have its integration tests run in native mode.
+ +* Build aborts complaining about missing (or superfluous) `minimal *-deployment dependencies` or `illegal runtime dependencies` + + To ensure a consistent build order, even when building in parallel (`./mvnw -T...`) or building incrementally/partially (`./mvnw -pl...`), the build enforces the presence of certain dependencies. + If those dependencies are not present, your local build will most likely use possibly outdated artifacts from you local repo and CI build might even fail not finding certain artifacts. + + Just do what the failing enforcer rule is telling you and you should be fine. + +* Build fails with multiple `This project has been banned from the build due to previous failures` messages + + Just scroll up, there should be an error or warning somewhere. Failing enforcer rules are known to cause such effects and in this case there'll be something like: + ``` + [WARNING] Rule 0: ... failed with message: + ... + ``` diff --git a/LICENSE.txt b/LICENSE.txt index d645695673349..ff9ad4530f571 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -193,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MAINTAINERS.adoc b/MAINTAINERS.adoc index f042e53803a99..44bc321e665b0 100644 --- a/MAINTAINERS.adoc +++ b/MAINTAINERS.adoc @@ -14,7 +14,7 @@ If you think some information is outdated, either provide a pull request or send |https://github.com/oztimpower[Timothy Power] |Amazon Lambda -|https://github.com/patriot1burke[Bill Burke], https://github.com/evanchooly[Justin Lee] +|https://github.com/patriot1burke[Bill Burke] |Amazon Services - KMS |https://github.com/marcinczeczko[Marcin Czeczko] @@ -52,6 +52,9 @@ If you think some information is outdated, either provide a pull request or send |Caffeine |https://github.com/galderz[Galder Zamarreño], https://github.com/wburns[William Burns] +|https://github.com/apache/camel-quarkus[Camel] +|https://github.com/jamesnetherton[James Netherton], https://github.com/ppalaga[Peter Palaga] + |Cassandra-Quarkus |https://github.com/adutra[Alexandre Dutra], https://github.com/tomekl007[Tomasz Lelek], https://github.com/absurdfarce[Bret McGuire], https://github.com/emerkle826[Erik Merkle], https://github.com/olim7t[Olivier Michallat] @@ -86,11 +89,14 @@ If you think some information is outdated, either provide a pull request or send |https://github.com/stuartwdouglas[Stuart Douglas] |Flyway -|https://github.com/gastaldi[George Gastaldi], https://github.com/geoand[Georgios Andrianakis] +|https://github.com/cristhiank[Cristhian Lopez], https://github.com/gastaldi[George Gastaldi], https://github.com/geoand[Georgios Andrianakis] |Funqy |https://github.com/patriot1burke[Bill Burke] +|Google Cloud Functions +|https://github.com/loicmathieu[Loïc Mathieu] + |gRPC |https://github.com/cescoffier[Clément Escoffier], https://github.com/michalszynkiewicz[Michal Szynkiewicz] @@ -100,6 +106,9 @@ If you think some information is outdated, either provide a pull request or send |Hibernate ORM with Panache |https://github.com/FroMage[Stéphane Épardaud] +|Hibernate ORM with Panache for Kotlin +|https://github.com/evanchooly[Justin Lee] + |Hibernate Search |https://github.com/gsmet[Guillaume Smet] @@ -155,7 +164,7 @@ If you think some information is outdated, either provide a pull 
request or send |https://github.com/mariofusco[Mario Fusco], https://github.com/cristianonicolai[Cristiano Nicolai] |Kotlin -|https://github.com/geoand[Georgios Andrianakis] +|https://github.com/geoand[Georgios Andrianakis], https://github.com/evanchooly[Justin Lee] |Kubernetes |https://github.com/geoand[Georgios Andrianakis], https://github.com/iocanel[Ioannis Canellos] @@ -176,7 +185,7 @@ If you think some information is outdated, either provide a pull request or send |https://github.com/cescoffier[Clément Escoffier] |MongoDB with Panache -|https://github.com/loicmathieu[Loïc Mathieu] +|https://github.com/loicmathieu[Loïc Mathieu], https://github.com/evanchooly[Justin Lee] |Narayana JTA |https://github.com/mmusgrov[Michael Musgrove], https://github.com/gsmet[Guillaume Smet] @@ -241,6 +250,9 @@ If you think some information is outdated, either provide a pull request or send |Scheduler |https://github.com/mkouba[Martin Kouba] +|SmallRye Config +|https://github.com/radcortez[Roberto Cortez] + |SmallRye Context Propagation |https://github.com/FroMage[Stéphane Épardaud], https://github.com/manovotn[Matěj Novotný] @@ -299,7 +311,7 @@ If you think some information is outdated, either provide a pull request or send |https://github.com/geoand[Georgios Andrianakis] |Swagger UI -|https://github.com/stuartwdouglas[Stuart Douglas] +|https://github.com/phillip-kruger[Phillip Kruger] |Tika |https://github.com/sberyozkin[Sergey Beryozkin] diff --git a/README.md b/README.md index 8ca122af24220..8d23a5df892c6 100644 --- a/README.md +++ b/README.md @@ -4,24 +4,24 @@ [![GitHub Actions Status]()](https://github.com/quarkusio/quarkus/actions?query=workflow%3A%22Quarkus+CI%22) [![License](https://img.shields.io/github/license/quarkusio/quarkus?style=for-the-badge&logo=apache)](https://www.apache.org/licenses/LICENSE-2.0) [![Project Chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg?style=for-the-badge&logo=zulip)](https://quarkusio.zulipchat.com/) -[![Supported JVM Versions](https://img.shields.io/badge/JVM-8--11--14-brightgreen.svg?style=for-the-badge&logo=Java)](https://github.com/quarkusio/quarkus/actions/runs/113853915/) +[![Supported JVM Versions](https://img.shields.io/badge/JVM-8--11--15-brightgreen.svg?style=for-the-badge&logo=Java)](https://github.com/quarkusio/quarkus/actions/runs/113853915/) # Quarkus - Supersonic Subatomic Java Quarkus is a Cloud Native, (Linux) Container First framework for writing Java applications. * **Container First**: -Minimal footprint Java applications optimal for running in containers +Minimal footprint Java applications optimal for running in containers. * **Cloud Native**: Embraces [12 factor architecture](https://12factor.net) in environments like Kubernetes. * **Unify imperative and reactive**: -Brings under one programming model non blocking and imperative styles of development. +Brings under one programming model non-blocking and imperative styles of development. * **Standards-based**: -Based on the standards and frameworks you love and use (RESTEasy and JAX-RS, Hibernate ORM and JPA, Netty, Eclipse Vert.x, Eclipse MicroProfile, Apache Camel...) +Based on the standards and frameworks you love and use (RESTEasy and JAX-RS, Hibernate ORM and JPA, Netty, Eclipse Vert.x, Eclipse MicroProfile, Apache Camel...). * **Microservice First**: -Brings lightning fast startup time and code turn around to Java apps +Brings lightning fast startup time and code turn around to Java apps. 
* **Developer Joy**: -Development centric experience without compromise to bring your amazing apps to life in no time +Development centric experience without compromise to bring your amazing apps to life in no time. _All under ONE framework._ @@ -30,9 +30,13 @@ _All under ONE framework._ * [Documentation](https://quarkus.io) * [Wiki](https://github.com/quarkusio/quarkus/wiki) +## Migration Guides + +We collect all the migration notes in our [migration guides](https://github.com/quarkusio/quarkus/wiki/Migration-Guides). + ## Release Planning -Interested on when the next release is coming? Check our [release planning](https://github.com/quarkusio/quarkus/wiki/Release-Planning) document for details +Interested in when the next release is coming? Check our [release planning](https://github.com/quarkusio/quarkus/wiki/Release-Planning) document for details. ## How to build Quarkus diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md index 982908f1b17cc..22c4fc7660118 100644 --- a/TROUBLESHOOTING.md +++ b/TROUBLESHOOTING.md @@ -144,7 +144,7 @@ It can be used in the same way as for the production application with the except ```shell script # profile CPU startup -mvn quarkus:dev -Djvm.args="agentpath:/path/to/async-profiler/build/libasyncProfiler.so=start,event=cpu,file=startup-cpu-profile.svg,interval=1000000,width=1600,simple" +mvn quarkus:dev -Djvm.args="-agentpath:/path/to/async-profiler/build/libasyncProfiler.so=start,event=cpu,file=startup-cpu-profile.svg,interval=1000000,width=1600,simple" # profile allocation startup mvn quarkus:dev -Djvm.args="-agentpath:/path/to/async-profiler/build/libasyncProfiler.so=start,event=alloc,file=/tmp/startup-alloc-profile.svg,interval=1000000,width=1600,simple" diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 2c4cca497b423..b235b2d40ebb0 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -14,38 +14,44 @@ pom - 2.5 - 2.2.1.Final - 4.5.6.Final + 1.68 + 1.0.2 + 1.0.11 + 2.2.3.Final + 4.5.9.Final 0.31.0 0.4.1 0.2.3 0.1.8 0.2.0 0.0.12 + 0.1.15 0.34.3 - 3.0.12.Final + 3.1.0.Beta2 1.0.0.Final + 1.4.3 + 1.6.5 + 0.22.0 1.4 2.3 1.0.1 1.3.3 1.0.1 1.4.1 - 1.1.0 - 1.8.4 - 2.2.3 - 2.4.2 - 2.0.5 - 1.0.5 - 1.3.4 - 4.3.0 - 2.2.0 - 1.0.13 + 1.1.1 + 1.5.0 + 1.11.1 + 2.2.6 + 2.4.6 + 2.0.26 + 1.0.22 + 1.3.5 + 4.3.2 + 2.4.4 + 1.1.0 1.0.13 - 1.1.0 - 2.2.1 - 3.30.0 + 1.4.0 + 2.9.0 1.2.1 1.3.5 3.0.3 @@ -61,142 +67,139 @@ 1.3.3 2.0.2 1.1.2 + 2.3.3 2.3.3-b02 2.3.1 2.0.1.Final 2.0.0.Final - 8.0.1 - 2.6 + 9.1 + 2.8.0 11.0.0.Final 3.6.3 1.4.1 3.0-alpha-2 - 27.0.1-jre + 30.1-jre 3.2.1 3.6.0 2.1.0 - 20.1.0 - 1.0.4.Final - 2.11.1 + 21.0.0 + 1.0.7.Final + 2.12.1 1.0.0.Final - 3.9 + 3.12.0 1.14 1.3.4 - 5.4.19.Final - 1.0.0.Alpha8 - 6.1.5.Final - 6.0.0.Beta8 - 5.10.5.Final + 5.4.29.Final + 1.0.0.CR1 + 6.2.0.Final + 6.0.2.Final + 5.10.6.Final 1.1.1.Final - 1.8 + 1.9 7.6.0.Final - 7.7.0 + 7.10.0 1.3.8 - 2.2.19 + 2.2.21 1.0.6.Final - 2.1.0.Final + 2.2.0.Final 1.7.30 - 1.2.0.Final + 1.1.0.Final 1.5.4.Final-format-001 1.0.1.Final - 1.12.1.Final + 1.15.1.Final 1.8.7.Final - 3.1.1.Final - 3.9.2 - 4.5.12 - 4.4.13 + 3.2.0.Final + 3.9.5 + 4.5.13 + 4.4.14 4.1.4 - 9.1.0 + 9.1.3 2.3.2 1.4.197 - 42.2.14 - 2.6.2 - 8.0.21 + 42.2.19 + 2.7.2 + 8.0.23 7.2.2.jre8 10.14.2.0 11.5.4.0 1.2.6 - 4.3.0 - 5.6.2 + 4.3.3 + 5.7.1 6.14.2 - 3.16.1 + 3.19.0 2.3 - 10.1.5.Final - 4.3.2.Final - 2.8.5 + 12.0.1.Final + 4.4.0.Alpha4 + 2.9.0 4.1.49.Final 1.0.3 - 3.3.2.Final - 0.7.0 - 1.1.0 - 1.1.0 - 2.5.0 + 3.4.1.Final + 0.14.0 + 1.5.0 + 2.7.0 3.5.7 - 2.12.9 - 1.22 + 2.13.5 + 1.24.1 1.4 1.2.1 - 
3.1.0 + 3.7.0 1.3.1 - 2.4.0 - 2.13.62 - 2.31.0 - 1.3.0 - 1.3.72 - 0.12.6 + 2.8.0 + 2.16.19 + 2.37.1 + 1.4.0 + 1.4.31 + 0.14.2 0.10.0 - 2.14.6 3.0.1 4.0.3 1.6.5 - 1.0.4 - 5.8.0.202006091008-r - 6.5.2 - 1.0.7 - 3.10.1 - 1.26 + 1.0.6 + 5.11.0.202103091610-r + 7.7.0 + 1.0.8 + 4.3.1 + 1.28 6.0.0 - 4.1.1 - 4.0.3 + 4.2.3 + 4.1.0 1.0.1 - 2.11.0 - 0.33.6 - 3.14.6 - 1.7.30 - - 3.1.0 + 2.17.0 + 0.33.8 + 3.14.9 + 4.3.0 3.1.7 0.1.0 - 4.10.3 + 5.2.1 2.2.0 5.2.SP4 - 2.1.SP1 + 2.1.SP2 5.2.Final 2.1.SP1 2.40.0 - 3.4.4 + 3.8.0 5.3.1 4.8 - 1.1.2.Final - 11.0.0 - 1.14.0 + 1.1.4.Final + 12.0.3 + 1.14.1 0.1.55 1.1.1 2.5.2 2.2.0 - 0.15.0 - 1.34.0 + 0.17.0 + 1.38.0 2.1 - 1.30.2 + 1.34.0 1.0.1 - 3.12.2 - 3.12.3 + 3.14.0 + 3.13.0 - 4.4.0 + 4.6.1 0.3.4 1.0.1 1.20 @@ -205,10 +208,28 @@ 1.0.0.Beta1 1.2.0.Final 1.10.0 + 0.8.6 + 1.15.2 + + + io.quarkus + quarkus-bom-quarkus-platform-descriptor + ${project.version} + json + ${project.version} + + + + io.quarkus + quarkus-bom-quarkus-platform-properties + ${project.version} + properties + + @@ -260,7 +281,7 @@ io.vertx - vertx-stack-depchain + vertx-dependencies ${vertx.version} import pom @@ -284,6 +305,32 @@ import + + + io.smallrye.common + smallrye-common-bom + ${smallrye-common.version} + import + pom + + + + + io.micrometer + micrometer-bom + ${micrometer.version} + pom + import + + + + + org.testcontainers + testcontainers-bom + ${testcontainers.version} + pom + import + @@ -301,6 +348,11 @@ quarkus-development-mode-spi ${project.version} + + io.quarkus + quarkus-class-change-agent + ${project.version} + io.quarkus quarkus-ide-launcher @@ -336,6 +388,16 @@ quarkus-caffeine-deployment ${project.version} + + io.quarkus + quarkus-jacoco + ${project.version} + + + io.quarkus + quarkus-jacoco-deployment + ${project.version} + io.quarkus quarkus-jaxb @@ -346,6 +408,16 @@ quarkus-jaxb-deployment ${project.version} + + io.quarkus + quarkus-jaxp + ${project.version} + + + io.quarkus + quarkus-jaxp-deployment + ${project.version} + io.quarkus quarkus-jackson @@ -454,6 +526,11 @@ quarkus-datasource-common ${project.version} + + io.quarkus + quarkus-datasource-deployment-spi + ${project.version} + io.quarkus quarkus-datasource @@ -464,6 +541,41 @@ quarkus-datasource-deployment ${project.version} + + io.quarkus + quarkus-devservices-postgresql + ${project.version} + + + io.quarkus + quarkus-devservices-h2 + ${project.version} + + + io.quarkus + quarkus-devservices-mysql + ${project.version} + + + io.quarkus + quarkus-devservices-mariadb + ${project.version} + + + io.quarkus + quarkus-devservices-derby + ${project.version} + + + io.quarkus + quarkus-devservices-db2 + ${project.version} + + + io.quarkus + quarkus-devservices-mssql + ${project.version} + io.quarkus quarkus-elasticsearch-rest-client-common @@ -559,6 +671,46 @@ quarkus-oidc-deployment ${project.version} + + io.quarkus + quarkus-oidc-client + ${project.version} + + + io.quarkus + quarkus-oidc-client-deployment + ${project.version} + + + io.quarkus + quarkus-oidc-client-filter + ${project.version} + + + io.quarkus + quarkus-oidc-client-filter-deployment + ${project.version} + + + io.quarkus + quarkus-oidc-token-propagation + ${project.version} + + + io.quarkus + quarkus-oidc-token-propagation-deployment + ${project.version} + + + io.quarkus + quarkus-oidc-common + ${project.version} + + + io.quarkus + quarkus-oidc-common-deployment + ${project.version} + io.quarkus quarkus-keycloak-authorization @@ -639,6 +791,21 @@ quarkus-panache-common-deployment ${project.version} + + io.quarkus + 
quarkus-mongodb-panache-common-deployment + ${project.version} + + + io.quarkus + quarkus-panache-hibernate-common + ${project.version} + + + io.quarkus + quarkus-panache-hibernate-common-deployment + ${project.version} + io.quarkus quarkus-panache-mock @@ -659,6 +826,16 @@ quarkus-hibernate-orm-panache-deployment ${project.version} + + io.quarkus + quarkus-hibernate-reactive-panache + ${project.version} + + + io.quarkus + quarkus-hibernate-reactive-panache-deployment + ${project.version} + io.quarkus quarkus-hibernate-orm-panache-kotlin @@ -679,6 +856,16 @@ quarkus-hibernate-orm-panache-common-deployment ${project.version} + + io.quarkus + quarkus-hibernate-reactive-panache-common + ${project.version} + + + io.quarkus + quarkus-hibernate-reactive-panache-common-deployment + ${project.version} + io.quarkus quarkus-rest-data-panache @@ -699,6 +886,16 @@ quarkus-hibernate-orm-rest-data-panache-deployment ${project.version} + + io.quarkus + quarkus-mongodb-rest-data-panache + ${project.version} + + + io.quarkus + quarkus-mongodb-rest-data-panache-deployment + ${project.version} + io.quarkus quarkus-mongodb-panache @@ -711,12 +908,22 @@ io.quarkus - quarkus-hibernate-search-elasticsearch + quarkus-mongodb-panache-kotlin + ${project.version} + + + io.quarkus + quarkus-mongodb-panache-common + ${project.version} + + + io.quarkus + quarkus-hibernate-search-orm-elasticsearch ${project.version} io.quarkus - quarkus-hibernate-search-elasticsearch-deployment + quarkus-hibernate-search-orm-elasticsearch-deployment ${project.version} @@ -729,6 +936,11 @@ quarkus-hibernate-validator-deployment ${project.version} + + io.quarkus + quarkus-hibernate-validator-spi + ${project.version} + io.quarkus quarkus-infinispan-client @@ -859,6 +1071,16 @@ quarkus-smallrye-jwt-deployment ${project.version} + + io.quarkus + quarkus-smallrye-jwt-build + ${project.version} + + + io.quarkus + quarkus-smallrye-jwt-build-deployment + ${project.version} + io.quarkus quarkus-smallrye-context-propagation @@ -961,6 +1183,11 @@ + + io.quarkus + quarkus-smallrye-openapi-spi + ${project.version} + io.quarkus quarkus-smallrye-graphql @@ -1021,6 +1248,16 @@ quarkus-rest-client-jsonb-deployment ${project.version} + + io.quarkus + quarkus-rest-client-mutiny + ${project.version} + + + io.quarkus + quarkus-rest-client-mutiny-deployment + ${project.version} + io.quarkus quarkus-resteasy-common @@ -1071,6 +1308,16 @@ quarkus-resteasy-jaxb-deployment ${project.version} + + io.quarkus + quarkus-resteasy-multipart + ${project.version} + + + io.quarkus + quarkus-resteasy-multipart-deployment + ${project.version} + io.quarkus quarkus-resteasy-server-common @@ -1153,22 +1400,27 @@ io.quarkus - quarkus-vertx-http-deployment + quarkus-vertx-http-dev-console-spi ${project.version} io.quarkus - quarkus-vertx-web + quarkus-vertx-http-dev-console-runtime-spi ${project.version} io.quarkus - quarkus-vertx-web-deployment + quarkus-vertx-http-deployment ${project.version} io.quarkus - quarkus-reactive-datasource + quarkus-vertx-web + ${project.version} + + + io.quarkus + quarkus-vertx-web-deployment ${project.version} @@ -1225,6 +1477,17 @@ io.quarkus quarkus-grpc ${project.version} + + + com.google.android + annotations + + + + + io.quarkus + quarkus-grpc-common + ${project.version} io.quarkus @@ -1241,6 +1504,11 @@ quarkus-grpc-deployment ${project.version} + + io.quarkus + quarkus-grpc-common-deployment + ${project.version} + io.quarkus quarkus-grpc-protoc-plugin @@ -1254,7 +1522,12 @@ io.quarkus - quarkus-undertow-websockets-deployment + 
quarkus-websockets + ${project.version} + + + io.quarkus + quarkus-websockets-deployment ${project.version} @@ -1332,6 +1605,16 @@ quarkus-spring-data-jpa-deployment ${project.version} + + io.quarkus + quarkus-spring-data-rest + ${project.version} + + + io.quarkus + quarkus-spring-data-rest-deployment + ${project.version} + io.quarkus quarkus-spring-boot-properties @@ -1467,6 +1750,16 @@ quarkus-amazon-lambda-http-deployment ${project.version} + + io.quarkus + quarkus-amazon-lambda-rest + ${project.version} + + + io.quarkus + quarkus-amazon-lambda-rest-deployment + ${project.version} + io.quarkus quarkus-amazon-common @@ -1537,6 +1830,16 @@ quarkus-amazon-kms-deployment ${project.version} + + io.quarkus + quarkus-amazon-iam + ${project.version} + + + io.quarkus + quarkus-amazon-iam-deployment + ${project.version} + io.quarkus quarkus-amazon-alexa @@ -1547,6 +1850,103 @@ quarkus-amazon-alexa-deployment ${project.version} + + io.quarkus + quarkus-resteasy-reactive-common + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-common-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-spi-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-deployment + ${project.version} + test + test-jar + + + io.quarkus + quarkus-resteasy-reactive-servlet-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-servlet + ${project.version} + + + io.quarkus + quarkus-jaxrs-client + ${project.version} + + + io.quarkus + quarkus-jaxrs-client-deployment + ${project.version} + + + io.quarkus + quarkus-rest-client-reactive + ${project.version} + + + io.quarkus + quarkus-rest-client-reactive-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-qute + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-qute-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-jsonb + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-jsonb-deployment + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-jackson + ${project.version} + + + io.quarkus + quarkus-resteasy-reactive-jackson-deployment + ${project.version} + + + io.quarkus + quarkus-reactive-datasource + ${project.version} + io.quarkus quarkus-azure-functions-http @@ -1587,11 +1987,26 @@ quarkus-container-image-s2i-deployment ${project.version} + + io.quarkus + quarkus-container-image-openshift + ${project.version} + + + io.quarkus + quarkus-container-image-openshift-deployment + ${project.version} + io.quarkus quarkus-container-image-deployment ${project.version} + + io.quarkus + quarkus-container-image + ${project.version} + io.quarkus quarkus-container-image-spi @@ -1672,6 +2087,26 @@ quarkus-kubernetes-client-deployment-internal ${project.version} + + io.quarkus + quarkus-kubernetes-service-binding + ${project.version} + + + io.quarkus + quarkus-kubernetes-service-binding-deployment + ${project.version} + + + io.quarkus + quarkus-openshift-client + ${project.version} + + + io.quarkus + quarkus-openshift-client-deployment + ${project.version} + io.quarkus quarkus-consul-config @@ -1772,6 +2207,11 @@ quarkus-vault ${project.version} + + io.quarkus + quarkus-vault-model + ${project.version} + io.quarkus quarkus-vault-deployment @@ -1852,6 +2292,26 @@ quarkus-google-cloud-functions-http-deployment ${project.version} + + io.quarkus + quarkus-micrometer-deployment + 
${project.version} + + + io.quarkus + quarkus-micrometer + ${project.version} + + + io.quarkus + quarkus-micrometer-registry-prometheus-deployment + ${project.version} + + + io.quarkus + quarkus-micrometer-registry-prometheus + ${project.version} + @@ -1904,6 +2364,11 @@ quarkus-test-kubernetes-client ${project.version} + + io.quarkus + quarkus-test-openshift-client + ${project.version} + io.quarkus quarkus-test-amazon-lambda @@ -1952,6 +2417,32 @@ + + + org.bouncycastle + bcprov-jdk15on + ${bouncycastle.version} + + + org.bouncycastle + bctls-jdk15on + ${bouncycastle.version} + + + org.bouncycastle + bcpkix-jdk15on + ${bouncycastle.version} + + + org.bouncycastle + bc-fips + ${bouncycastle.fips.version} + + + org.bouncycastle + bctls-fips + ${bouncycastle.tls.fips.version} + io.dekorate @@ -2004,11 +2495,25 @@ org.jboss.logmanager log4j2-jboss-logmanager ${log4j2-jboss-logmanager.version} + + + + org.jboss.logmanager + jboss-logmanager + + org.jboss.logmanager log4j-jboss-logmanager ${log4j-jboss-logmanager.version} + + + + org.jboss.logmanager + jboss-logmanager + + @@ -2119,7 +2624,7 @@ io.smallrye.reactive - mutiny-context-propagation + mutiny-smallrye-context-propagation ${mutiny.version} @@ -2132,6 +2637,16 @@ mutiny-reactor ${mutiny.version} + + io.smallrye.reactive + mutiny-kotlin + ${mutiny.version} + + + io.smallrye.reactive + mutiny-test-utils + ${mutiny.version} + io.quarkus quarkus-mutiny @@ -2152,6 +2667,16 @@ quarkus-resteasy-mutiny-deployment ${project.version} + + io.quarkus + quarkus-resteasy-mutiny-common + ${project.version} + + + io.quarkus + quarkus-resteasy-mutiny-common-deployment + ${project.version} + io.quarkus quarkus-resteasy-server-common-spi @@ -2223,6 +2748,11 @@ opentracing-jdbc ${opentracing-jdbc.version} + + io.opentracing.contrib + opentracing-kafka-client + ${opentracing-kafka.version} + io.rest-assured rest-assured @@ -2335,6 +2865,10 @@ org.apache.cxf cxf-rt-rs-client + + commons-logging + commons-logging + ${tika.version} @@ -2372,7 +2906,7 @@ io.quarkus.http - quarkus-http-websockets-jsr + quarkus-http-websocket-vertx ${quarkus-http.version} @@ -2385,6 +2919,16 @@ io.quarkus.security quarkus-security ${quarkus-security.version} + + + javax.inject + javax.inject + + + javax.enterprise + cdi-api + + io.quarkus @@ -2405,11 +2949,6 @@ 1.5.0 - - io.smallrye.common - smallrye-common-annotation - ${smallrye-common.version} - io.smallrye.config smallrye-config @@ -2419,6 +2958,10 @@ org.osgi org.osgi.annotation.versioning + + org.ow2.asm + asm + @@ -2461,6 +3004,11 @@ + + io.smallrye + smallrye-metrics-api + ${smallrye-metrics.version} + io.smallrye smallrye-open-api-core @@ -2505,6 +3053,11 @@ + + io.smallrye + smallrye-open-api-ui + ${smallrye-open-api.version} + io.smallrye smallrye-graphql-cdi @@ -2602,8 +3155,21 @@ javax javaee-api + + javax.annotation + javax.annotation-api + + + io.smallrye + smallrye-jwt-build + + + io.smallrye + smallrye-jwt-build + ${smallrye-jwt.version} + jakarta.activation jakarta.activation-api @@ -2645,7 +3211,7 @@ ${jakarta.interceptor-api.version} - + jakarta.ejb jakarta.ejb-api @@ -2671,11 +3237,39 @@ org.apache.activemq artemis-core-client ${artemis.version} + + + org.apache.geronimo.specs + geronimo-json_1.0_spec + + + org.apache.johnzon + johnzon-core + + + commons-logging + commons-logging + + org.apache.activemq artemis-jms-client ${artemis.version} + + + org.apache.geronimo.specs + geronimo-json_1.0_spec + + + org.apache.johnzon + johnzon-core + + + commons-logging + commons-logging + + org.apache.activemq @@ -2686,6 
+3280,12 @@ org.apache.activemq artemis-server ${artemis.version} + + + org.checkerframework + checker-qual + + org.apache.activemq @@ -2707,6 +3307,11 @@ httpclient-cache ${httpclient.version} + + org.apache.httpcomponents + httpmime + ${httpclient.version} + org.apache.httpcomponents httpasyncclient @@ -2804,9 +3409,14 @@ okhttp ${okhttp.version} + + com.squareup.okhttp3 + logging-interceptor + ${okhttp.version} + io.sentry - sentry + sentry-jul ${sentry.version} @@ -2980,6 +3590,17 @@ + + org.eclipse.microprofile.jwt + microprofile-jwt-auth-api + ${microprofile-jwt.version} + + + org.osgi + org.osgi.annotation.versioning + + + org.glassfish jakarta.el @@ -3000,6 +3621,11 @@ jaxb-runtime ${jaxb-runtime.version} + + jakarta.xml.bind + jakarta.xml.bind-api + ${jakarta.xml.bind-api.version} + jakarta.security.jacc jakarta.security.jacc-api @@ -3025,16 +3651,12 @@ jandex ${jandex.version} - - org.aesh - aesh - ${aesh.version} - org.graalvm.nativeimage svm ${graal-sdk.version} + provided @@ -3227,6 +3849,41 @@ arc ${project.version} + + io.quarkus.resteasy.reactive + resteasy-reactive-processor + ${project.version} + + + io.quarkus.resteasy.reactive + resteasy-reactive + ${project.version} + + + io.quarkus.resteasy.reactive + resteasy-reactive-vertx + ${project.version} + + + io.quarkus.resteasy.reactive + resteasy-reactive-common + ${project.version} + + + io.quarkus.resteasy.reactive + resteasy-reactive-common-processor + ${project.version} + + + io.quarkus.resteasy.reactive + resteasy-reactive-client + ${project.version} + + + io.quarkus.resteasy.reactive + resteasy-reactive-client-processor + ${project.version} + org.wildfly.common wildfly-common @@ -3249,6 +3906,11 @@ wildfly-client-config ${wildfly-client-config.version} + + org.wildfly.security + wildfly-elytron + ${wildfly-elytron.version} + org.wildfly.security wildfly-elytron-auth-server @@ -3319,12 +3981,37 @@ wildfly-elytron-credential ${wildfly-elytron.version} + + org.wildfly.security + wildfly-elytron-sasl-gs2 + ${wildfly-elytron.version} + + + org.wildfly.security + wildfly-elytron-asn1 + ${wildfly-elytron.version} + + + org.wildfly.security + wildfly-elytron-sasl-gssapi + ${wildfly-elytron.version} + + + org.wildfly.security + wildfly-elytron-security-manager-action + ${wildfly-elytron.version} + org.ow2.asm asm ${asm.version} + + org.ow2.asm + asm-commons + ${asm.version} + org.ow2.asm asm-tree @@ -3347,8 +4034,8 @@ org.jboss.slf4j - slf4j-jboss-logging - ${slf4j-jboss-logging.version} + slf4j-jboss-logmanager + ${slf4j-jboss-logmanager.version} jakarta.enterprise @@ -3416,6 +4103,12 @@ org.postgresql postgresql ${postgresql-jdbc.version} + + + org.checkerframework + checker-qual + + org.mariadb.jdbc @@ -3495,6 +4188,27 @@ mockito-junit-jupiter ${mockito.version} + + org.jacoco + org.jacoco.core + ${jacoco.version} + + + org.jacoco + org.jacoco.report + ${jacoco.version} + + + org.jacoco + org.jacoco.agent + ${jacoco.version} + + + org.jacoco + org.jacoco.agent + ${jacoco.version} + runtime + net.sourceforge.htmlunit htmlunit @@ -3510,45 +4224,10 @@ - - io.smallrye.reactive - smallrye-axle-generator - ${axle-client.version} - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-core - - - - - io.smallrye.reactive - smallrye-axle-amqp-client - ${axle-client.version} - - - io.smallrye.reactive - smallrye-axle-core - ${axle-client.version} - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-core - - - io.smallrye.reactive 
smallrye-mutiny-vertx-core - ${mutiny-client.version} + ${mutiny-vertx.version} com.fasterxml.jackson.core @@ -3560,50 +4239,30 @@ - - io.smallrye.reactive - smallrye-axle-web-client - ${axle-client.version} - io.smallrye.reactive smallrye-mutiny-vertx-web-client - ${mutiny-client.version} - - - io.smallrye.reactive - smallrye-axle-sql-client - ${axle-client.version} + ${mutiny-vertx.version} io.smallrye.reactive smallrye-mutiny-vertx-sql-client - ${mutiny-client.version} - - - io.smallrye.reactive - smallrye-axle-mysql-client - ${axle-client.version} + ${mutiny-vertx.version} io.smallrye.reactive smallrye-mutiny-vertx-db2-client - ${mutiny-client.version} + ${mutiny-vertx.version} io.smallrye.reactive smallrye-mutiny-vertx-mysql-client - ${mutiny-client.version} - - - io.smallrye.reactive - smallrye-axle-pg-client - ${axle-client.version} + ${mutiny-vertx.version} io.smallrye.reactive smallrye-mutiny-vertx-pg-client - ${mutiny-client.version} + ${mutiny-vertx.version} @@ -3611,35 +4270,15 @@ client ${scram-client.version} - - io.smallrye.reactive - smallrye-axle-mail-client - ${axle-client.version} - - + io.smallrye.reactive smallrye-mutiny-vertx-mail-client - ${mutiny-client.version} + ${mutiny-vertx.version} io.smallrye.reactive smallrye-mutiny-vertx-redis-client - ${mutiny-client.version} - - - io.vertx - vertx-rx-java2 - ${vertx.version} - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-core - - + ${mutiny-vertx.version} io.vertx @@ -3755,6 +4394,21 @@ + + io.smallrye.reactive + smallrye-connector-attribute-processor + ${smallrye-reactive-messaging.version} + + + io.quarkus + quarkus-reactive-messaging-http + ${project.version} + + + io.quarkus + quarkus-reactive-messaging-http-deployment + ${project.version} + org.infinispan @@ -4055,11 +4709,6 @@ - - jline - jline - ${jline.version} - org.apache.maven.shared maven-invoker @@ -4080,11 +4729,6 @@ jProcesses ${jprocesses.version} - - org.webjars - swagger-ui - ${swagger-ui.version} - org.flywaydb @@ -4111,17 +4755,6 @@ snakeyaml ${snakeyaml.version} - - org.osgi - org.osgi.core - ${osgi.version} - - - - org.webjars - bootstrap - ${bootstrap.version} - org.webjars @@ -4138,6 +4771,12 @@ com.google.errorprone error_prone_annotations ${error-prone-annotations.version} + + + org.codehaus.mojo + animal-sniffer-annotations + + @@ -4171,6 +4810,11 @@ quarkus-spring-data-commons-api ${quarkus-spring-data-api.version} + + io.quarkus + quarkus-spring-data-rest-api + ${quarkus-spring-data-api.version} + io.quarkus quarkus-spring-web-api @@ -4429,6 +5073,11 @@ quarkus-builder ${project.version} + + io.quarkus + quarkus-devtools-registry-client + ${project.version} + diff --git a/bom/test/pom.xml b/bom/test/pom.xml index 4192266f99787..d9316eb2e4c1c 100644 --- a/bom/test/pom.xml +++ b/bom/test/pom.xml @@ -16,8 +16,8 @@ Dependency management for integration tests. Importable by third party extension developers. 
- 1.2.1.Final - 1.14.3 + 1.4.2.Final + 1.15.2 diff --git a/build-parent/pom.xml b/build-parent/pom.xml index c350deab24d1d..ae17f52c26580 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -15,32 +15,38 @@ pom + 1.8 + 1.8 + true + - 6.15 - 7.7.0 - 4.1.1 - + 3.8.1 - 1.3.72 - 2.12.8 - 4.1.1 + 1.4.31 + 2.12.13 + 4.4.0 + + ${scala-maven-plugin.version} + + 3.0.0-M3 3.0.0-M5 + 3.0.0 ${version.surefire.plugin} - 20.1.0 - 4.1.1 - 1.1.0 + 21.0.0 + 21.0 + 4.3.2 1.1.0 3.9.2 - 2.3.30 + 2.3.31 1.5.0.Final @@ -56,7 +62,7 @@ 3.6.3 0.7.7 - 6.5.1 + 6.8.3 2.2 @@ -65,34 +71,78 @@ 2.1.1 1.0 1.4.1 - 1.1.2 + 1.1.3-RC1 1.3.3 1.0.1 1.1.1 - 1.0.2 + 1.0.3 4.7.2 + + 7.10.0 + docker.elastic.co/elasticsearch/elasticsearch-oss:${elasticsearch-server.version} + http + 3.0.2 1.11.3 3.3.3 - - quay.io/keycloak/keycloak:11.0.0 + + quay.io/keycloak/keycloak:12.0.3 4.0.13 - 2.26.3 - 6.0.0 + 2.27.2 + 7.0.0 - 0.8.5 + + 0.8.6 3.1.2 + + + 3.12.3 + 1.30.2 + + 2.0.30.Final + + + 4.6.0 + 5.15.2 + 3.5.1 + + + 0.12.1 + 0.22.0 + 1.9.1 + 0.12.1 + 0.2.1 + + 1.6.0.Final + ${project.version} + + nonBreaking + + true + + false + + + 3.1 + + + quay.io/quarkus/ubi-quarkus-native-image:21.0.0-java11 + + sh + ${maven.multiModuleProjectDirectory}/.github/docker-prune.${script.extension} + + ${enforcer.skip} @@ -108,30 +158,24 @@ - - - io.quarkus - quarkus-devmode-test-utils - ${project.version} - io.quarkus - quarkus-devtools-utilities + quarkus-test-devtools ${project.version} io.quarkus - quarkus-devtools-common + quarkus-devmode-test-utils ${project.version} io.quarkus - quarkus-platform-descriptor-api + quarkus-devtools-utilities ${project.version} io.quarkus - quarkus-platform-descriptor-resolver-json + quarkus-devtools-common ${project.version} @@ -200,10 +244,19 @@ com.google.code.findbugs jsr305 - + + + commons-logging + commons-logging + - + + io.netty + netty-tcnative-boringssl-static + test + ${boring-ssl.version} + io.quarkus quarkus-bootstrap-core @@ -232,6 +285,23 @@ ${project.version} test + + + + org.webjars + bootstrap + ${webjar.bootstrap.version} + + + org.webjars + font-awesome + ${webjar.font-awesome.version} + + + org.webjars + jquery + ${webjar.jquery.version} + @@ -245,8 +315,13 @@ org.jboss.logmanager.LogManager ${maven.home} + ${settings.localRepository} + ${session.request.userSettingsFile.path} + ${project.version} - ${jacoco.agent.argLine} + ${jacoco.agent.argLine} -Xmx1500m + + false @@ -256,7 +331,12 @@ org.jboss.logmanager.LogManager ${maven.home} + ${settings.localRepository} + ${session.request.userSettingsFile.path} + ${project.version} + + false @@ -340,6 +420,7 @@ javax.ws.rs:javax.ws.rs-api org.jboss.logging:jboss-logmanager + org.jboss.logging:jboss-logging-jdk javax:javaee-api @@ -350,6 +431,8 @@ jakarta.json:jakarta.json-api io.netty:netty-all + + org.jboss.logmanager:jboss-logmanager log4j:log4j org.apache.logging.log4j:log4j-core @@ -359,7 +442,8 @@ commons-logging:commons-logging-api org.springframework:spring-jcl org.slf4j:jcl-over-slf4j - + + org.jboss.slf4j:slf4j-jboss-logging org.slf4j:slf4j-simple org.slf4j:slf4j-nop org.slf4j:slf4j-jdk14 @@ -380,6 +464,8 @@ org.checkerframework:checker-qual org.jboss.resteasy:resteasy-context-propagation + com.google.android:annotations + org.codehaus.mojo:animal-sniffer-annotations @@ -392,6 +478,32 @@ enforce + + enforce-test-deps-scope + + + + + io.quarkus:quarkus-test-* + io.rest-assured:* + org.assertj:* + org.junit.jupiter:* + + + io.quarkus:quarkus-test-*:*:*:test + io.rest-assured:*:*:*:test + org.assertj:*:*:*:test + 
org.junit.jupiter:*:*:*:test + + Found test dependencies with wrong scope: + + + ${enforce-test-deps-scope.skip} + + + enforce + + @@ -415,7 +527,7 @@ net.revelc.code.formatter formatter-maven-plugin - 2.12.1 + 2.14.0 quarkus-ide-config @@ -432,7 +544,7 @@ net.revelc.code impsort-maven-plugin - 1.4.1 + 1.6.0 java.,javax.,org.,com. * @@ -449,20 +561,6 @@ maven-resources-plugin 3.1.0 - - maven-compiler-plugin - - true - - - - com.github.alexcojocaru - elasticsearch-maven-plugin - ${elasticsearch-maven-plugin.version} - - ${elasticsearch-server.version} - - org.antlr antlr4-maven-plugin @@ -482,6 +580,16 @@ + + org.codehaus.mojo + build-helper-maven-plugin + ${build-helper-plugin.version} + + + org.revapi + revapi-maven-plugin + ${revapi-maven-plugin.version} + @@ -493,17 +601,9 @@ [9,) - - - - maven-compiler-plugin - - 8 - true - - - - + + 8 + @@ -597,7 +697,7 @@ ${nexus-staging-maven-plugin.version} true - https://oss.sonatype.org/ + https://s01.oss.sonatype.org/ ossrh false true @@ -805,5 +905,129 @@ + + + api-check + + false + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + parse-version + + parse-version + + validate + + + + + org.revapi + revapi-maven-plugin + + + io.quarkus + quarkus-revapi-config + ${project.version} + + + org.revapi + revapi-java + ${revapi-java-plugin.version} + + + org.revapi + revapi-reporter-json + ${revapi-reporter-json.version} + + + org.revapi + revapi-reporter-text + ${revapi-reporter-text.version} + + + + + ${project.groupId}:${project.artifactId}:${revapi.oldVersion} + + + ${project.groupId}:${project.artifactId}:${revapi.newVersion} + + + false + + \d+\.\d+\.\d+\.Final + + + + + revapi/revapi-configuration.xml + + + api-changes.xml + + versions/v${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion} + + + + + nonBreaking + potentiallyBreaking + true + ${revapi.checkdeps} + xml + ${project.build.directory}/api-changes-suggestions.xml + true + + + + + api-check + + check + + verify + + + + api-report + + report + + package + + + + + + + + + Windows + + + Windows + + + + bat + + + diff --git a/core/builder/pom.xml b/core/builder/pom.xml index 491c8718cb601..b2160369dbd27 100644 --- a/core/builder/pom.xml +++ b/core/builder/pom.xml @@ -66,7 +66,6 @@ maven-surefire-plugin 1 - false diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java b/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java index 704bb995249e3..59cf7a669f250 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java @@ -155,7 +155,7 @@ public BuildChain build() throws ChainBuildException { int initialSingleCount = 0; int initialMultiCount = 0; final Map steps = this.steps; - // compile master produce/consume maps + // compile main produce/consume maps final Map> allConsumes = new HashMap<>(); final Map> allProduces = new HashMap<>(); final Set initialIds = this.initialIds; diff --git a/core/builder/src/main/java/io/quarkus/builder/Execution.java b/core/builder/src/main/java/io/quarkus/builder/Execution.java index 136015674b388..e89393b444963 100644 --- a/core/builder/src/main/java/io/quarkus/builder/Execution.java +++ b/core/builder/src/main/java/io/quarkus/builder/Execution.java @@ -52,6 +52,7 @@ final class Execution { this.multis = new ConcurrentHashMap<>(builder.getInitialMulti()); this.finalIds = finalIds; final EnhancedQueueExecutor.Builder executorBuilder = new EnhancedQueueExecutor.Builder(); + 
executorBuilder.setRegisterMBean(false); executorBuilder.setCorePoolSize(8).setMaximumPoolSize(1024); executorBuilder.setExceptionHandler(JBossExecutors.loggingExceptionHandler()); executorBuilder.setThreadFactory(new JBossThreadFactory(new ThreadGroup("build group"), Boolean.FALSE, null, "build-%t", diff --git a/core/builder/src/test/java/io/quarkus/builder/BasicTests.java b/core/builder/src/test/java/io/quarkus/builder/BasicTests.java index f8dabe7f37059..f9efe4c9a36ca 100644 --- a/core/builder/src/test/java/io/quarkus/builder/BasicTests.java +++ b/core/builder/src/test/java/io/quarkus/builder/BasicTests.java @@ -46,7 +46,6 @@ public void execute(final BuildContext context) { @Test public void testFailure() throws ChainBuildException, BuildException { final BuildChainBuilder builder = BuildChain.builder(); - final AtomicBoolean ran = new AtomicBoolean(); BuildStepBuilder stepBuilder = builder.addBuildStep(new BuildStep() { @Override public void execute(final BuildContext context) { diff --git a/core/class-change-agent/pom.xml b/core/class-change-agent/pom.xml new file mode 100644 index 0000000000000..5b57062f854b3 --- /dev/null +++ b/core/class-change-agent/pom.xml @@ -0,0 +1,40 @@ + + + 4.0.0 + + + quarkus-build-parent + io.quarkus + 999-SNAPSHOT + ../../build-parent/pom.xml + + + quarkus-class-change-agent + Quarkus - Core - Class Change Agent + + A Java Agent that exposes the instrumentation API. This allows for super fast + hot reloads if there have been no changes to the structure of the class files. + + This agent is not required for hot reload, it just provides an optimisation in + some circumstances. + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + io.quarkus.changeagent.ClassChangeAgent + true + + + + + + + diff --git a/core/class-change-agent/src/main/java/io/quarkus/changeagent/ClassChangeAgent.java b/core/class-change-agent/src/main/java/io/quarkus/changeagent/ClassChangeAgent.java new file mode 100644 index 0000000000000..4b92caf6824da --- /dev/null +++ b/core/class-change-agent/src/main/java/io/quarkus/changeagent/ClassChangeAgent.java @@ -0,0 +1,18 @@ +package io.quarkus.changeagent; + +import java.lang.instrument.Instrumentation; + +public class ClassChangeAgent { + + private static volatile Instrumentation instrumentation; + + public static Instrumentation getInstrumentation() { + return instrumentation; + } + + public static void premain(java.lang.String s, Instrumentation i) { + instrumentation = i; + + } + +} diff --git a/core/deployment/pom.xml b/core/deployment/pom.xml index 2c860be4991f7..8a4d6727b2947 100644 --- a/core/deployment/pom.xml +++ b/core/deployment/pom.xml @@ -8,7 +8,6 @@ io.quarkus quarkus-core-parent 999-SNAPSHOT - ../ quarkus-core-deployment @@ -35,6 +34,10 @@ io.quarkus quarkus-development-mode-spi + + io.quarkus + quarkus-class-change-agent + io.quarkus quarkus-bootstrap-core @@ -90,6 +93,19 @@ test-jar test + + io.quarkus + quarkus-bom-quarkus-platform-properties + pom + ${project.version} + + + * + * + + + test + diff --git a/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java b/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java index 4324ca0f26135..9fddfbe72ae85 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java @@ -12,66 +12,6 @@ */ public final class Capabilities extends SimpleBuildItem { - // The following constants will be removed at some point post Quarkus 1.6 - @Deprecated - public 
static final String AGROAL = Capability.AGROAL.getName(); - @Deprecated - public static final String CDI_ARC = Capability.CDI.getName(); - @Deprecated - public static final String SERVLET = Capability.SERVLET.getName(); - @Deprecated - public static final String TRANSACTIONS = Capability.TRANSACTIONS.getName(); - @Deprecated - public static final String JACKSON = Capability.JACKSON.getName(); - @Deprecated - public static final String JSONB = Capability.JSONB.getName(); - @Deprecated - public static final String REST_JACKSON = Capability.REST_JACKSON.getName(); - @Deprecated - public static final String REST_JSONB = Capability.REST_JSONB.getName(); - @Deprecated - public static final String RESTEASY_JSON_EXTENSION = Capability.RESTEASY_JSON.getName(); - @Deprecated - public static final String RESTEASY = Capability.RESTEASY.getName(); - @Deprecated - public static final String JWT = Capability.JWT.getName(); - @Deprecated - public static final String TIKA = Capability.TIKA.getName(); - @Deprecated - public static final String MONGODB_PANACHE = Capability.MONGODB_PANACHE.getName(); - @Deprecated - public static final String FLYWAY = Capability.FLYWAY.getName(); - @Deprecated - public static final String LIQUIBASE = Capability.LIQUIBASE.getName(); - @Deprecated - public static final String SECURITY = Capability.SECURITY.getName(); - @Deprecated - public static final String SECURITY_ELYTRON_OAUTH2 = Capability.SECURITY_ELYTRON_OAUTH2.getName(); - @Deprecated - public static final String SECURITY_ELYTRON_JDBC = Capability.SECURITY_ELYTRON_JDBC.getName(); - @Deprecated - public static final String SECURITY_ELYTRON_LDAP = Capability.SECURITY_ELYTRON_LDAP.getName(); - @Deprecated - public static final String SECURITY_JPA = Capability.SECURITY_JPA.getName(); - @Deprecated - public static final String QUARTZ = Capability.QUARTZ.getName(); - @Deprecated - public static final String METRICS = Capability.METRICS.getName(); - @Deprecated - public static final String RESTEASY_MUTINY_EXTENSION = Capability.RESTEASY_MUTINY.getName(); - @Deprecated - public static final String CONTAINER_IMAGE_JIB = Capability.CONTAINER_IMAGE_JIB.getName(); - @Deprecated - public static final String CONTAINER_IMAGE_DOCKER = Capability.CONTAINER_IMAGE_DOCKER.getName(); - @Deprecated - public static final String CONTAINER_IMAGE_S2I = Capability.CONTAINER_IMAGE_S2I.getName(); - @Deprecated - public static final String HIBERNATE_ORM = Capability.HIBERNATE_ORM.getName(); - @Deprecated - public static final String SMALLRYE_OPENTRACING = Capability.SMALLRYE_OPENTRACING.getName(); - @Deprecated - public static final String SPRING_WEB = Capability.SPRING_WEB.getName(); - private final Set capabilities; public Capabilities(Set capabilities) { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/Capability.java b/core/deployment/src/main/java/io/quarkus/deployment/Capability.java index 3824f527ae542..c127fe43503a1 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/Capability.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/Capability.java @@ -13,6 +13,7 @@ public enum Capability { * JSR 365 compatible contexts and dependency injection */ CDI, + CONFIG_YAML, /** * Java Servlet API */ @@ -28,9 +29,13 @@ public enum Capability { RESTEASY, RESTEASY_JSON, RESTEASY_MUTINY, + RESTEASY_REACTIVE, + RESTEASY_JACKSON, + RESTEASY_JSONB, JWT, TIKA, MONGODB_PANACHE, + MONGODB_PANACHE_KOTLIN, FLYWAY, LIQUIBASE, SECURITY, @@ -39,17 +44,34 @@ public enum Capability { SECURITY_ELYTRON_LDAP, SECURITY_JPA, QUARTZ, 
+ KUBERNETES_SERVICE_BINDING, + /** + * @deprecated + * @see io.quarkus.deployment.metrics.MetricsCapabilityBuildItem + */ METRICS, CONTAINER_IMAGE_JIB, CONTAINER_IMAGE_DOCKER, CONTAINER_IMAGE_S2I, + CONTAINER_IMAGE_OPENSHIFT, HIBERNATE_ORM, + HIBERNATE_ENVERS, HIBERNATE_REACTIVE, + HIBERNATE_VALIDATOR, + /** + * Presence of an io.opentracing tracer (for example, Jaeger). + */ + OPENTRACING, + /** + * Presence of SmallRye OpenTracing. + */ SMALLRYE_OPENTRACING, - SPRING_WEB; + SPRING_WEB, + SMALLRYE_OPENAPI, + OPENSHIFT_CLIENT; /** - * + * * @return the name */ public String getName() { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenContext.java b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenContext.java index 8fcb203ef7538..a0c5dfb669c3e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenContext.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenContext.java @@ -1,6 +1,7 @@ package io.quarkus.deployment; import java.nio.file.Path; +import java.util.Map; import io.quarkus.bootstrap.model.AppModel; @@ -9,12 +10,17 @@ public class CodeGenContext { private final Path outDir; private final Path workDir; private final Path inputDir; + private final boolean redirectIO; + private final Map properties; - public CodeGenContext(AppModel model, Path outDir, Path workDir, Path inputDir) { + public CodeGenContext(AppModel model, Path outDir, Path workDir, Path inputDir, boolean redirectIO, + Map properties) { this.model = model; this.outDir = outDir; this.workDir = workDir; this.inputDir = inputDir; + this.redirectIO = redirectIO; + this.properties = properties; } public AppModel appModel() { @@ -32,4 +38,12 @@ public Path workDir() { public Path inputDir() { return inputDir; } + + public boolean shouldRedirectIO() { + return redirectIO; + } + + public Map properties() { + return properties; + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java index 5b7ce4af881fb..25c2550b7bd20 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenerator.java @@ -5,6 +5,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.ServiceLoader; import java.util.Set; import java.util.function.Consumer; @@ -23,10 +24,11 @@ public class CodeGenerator { public static void initAndRun(ClassLoader classLoader, Set sourceParentDirs, Path generatedSourcesDir, Path buildDir, Consumer sourceRegistrar, - AppModel appModel) throws CodeGenException { + AppModel appModel, Map properties) throws CodeGenException { List generators = init(classLoader, sourceParentDirs, generatedSourcesDir, buildDir, sourceRegistrar); for (CodeGenData generator : generators) { - trigger(classLoader, generator, appModel); + generator.setRedirectIO(true); + trigger(classLoader, generator, appModel, properties); } } @@ -45,7 +47,7 @@ public static List init(ClassLoader deploymentClassLoader, } catch (ClassNotFoundException e) { throw new CodeGenException("Failde to load CodeGenProvider class from deployment classloader", e); } - for (CodeGenProvider provider : ServiceLoader.load(codeGenProviderClass)) { + for (CodeGenProvider provider : ServiceLoader.load(codeGenProviderClass, deploymentClassLoader)) { Path outputDir = codeGenOutDir(generatedSourcesDir, provider, sourceRegistrar); for (Path sourceParentDir : 
sourceParentDirs) { result.add( @@ -74,19 +76,21 @@ private static T callWithClassloader(ClassLoader deploymentClassLoader, Code * @param deploymentClassLoader deployment classloader * @param data code gen * @param appModel app model + * @param properties custom code generation properties * @return true if sources have been created * @throws CodeGenException on failure */ public static boolean trigger(ClassLoader deploymentClassLoader, CodeGenData data, - AppModel appModel) throws CodeGenException { + AppModel appModel, + Map properties) throws CodeGenException { return callWithClassloader(deploymentClassLoader, () -> { - Thread.currentThread().setContextClassLoader(deploymentClassLoader); - CodeGenProvider provider = data.provider; return Files.isDirectory(data.sourceDir) - && provider.trigger(new CodeGenContext(appModel, data.outPath, data.buildDir, data.sourceDir)); + && provider.trigger( + new CodeGenContext(appModel, data.outPath, data.buildDir, data.sourceDir, data.redirectIO, + properties)); }); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ExtensionLoader.java b/core/deployment/src/main/java/io/quarkus/deployment/ExtensionLoader.java index 2f1c3145eb830..14b9990b3f0bd 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/ExtensionLoader.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/ExtensionLoader.java @@ -10,6 +10,7 @@ import static io.quarkus.deployment.util.ReflectUtil.rawTypeIs; import static io.quarkus.deployment.util.ReflectUtil.rawTypeOf; import static io.quarkus.deployment.util.ReflectUtil.rawTypeOfParameter; +import static java.util.Arrays.asList; import java.io.IOException; import java.lang.reflect.AnnotatedElement; @@ -79,6 +80,7 @@ import io.quarkus.deployment.recording.RecorderContext; import io.quarkus.deployment.util.ReflectUtil; import io.quarkus.deployment.util.ServiceUtil; +import io.quarkus.dev.spi.DevModeType; import io.quarkus.gizmo.BytecodeCreator; import io.quarkus.gizmo.FieldDescriptor; import io.quarkus.gizmo.ResultHandle; @@ -88,6 +90,9 @@ import io.quarkus.runtime.annotations.Recorder; import io.quarkus.runtime.configuration.ConfigUtils; import io.quarkus.runtime.configuration.QuarkusConfigFactory; +import io.smallrye.config.KeyMap; +import io.smallrye.config.KeyMapBackedConfigSource; +import io.smallrye.config.NameIterator; import io.smallrye.config.PropertiesConfigSource; import io.smallrye.config.SmallRyeConfig; import io.smallrye.config.SmallRyeConfigBuilder; @@ -99,6 +104,7 @@ public final class ExtensionLoader { private ExtensionLoader() { } + private static final Logger loadLog = Logger.getLogger("io.quarkus.deployment"); private static final Logger cfgLog = Logger.getLogger("io.quarkus.configuration"); private static final String CONFIG_ROOTS_LIST = "META-INF/quarkus-config-roots.list"; @@ -107,61 +113,22 @@ private static boolean isRecorder(AnnotatedElement element) { return element.isAnnotationPresent(Recorder.class); } - /** - * Load all the build steps from the given class loader. - * - * @param classLoader the class loader - * @return a consumer which adds the steps to the given chain builder - * @throws IOException if the class loader could not load a resource - * @throws ClassNotFoundException if a build step class is not found - */ - public static Consumer loadStepsFrom(ClassLoader classLoader) - throws IOException, ClassNotFoundException { - return loadStepsFrom(classLoader, new Properties()); - } - - /** - * Load all the build steps from the given class loader. 
- * - * @param classLoader the class loader - * @param launchMode the launch mode - * @return a consumer which adds the steps to the given chain builder - * @throws IOException if the class loader could not load a resource - * @throws ClassNotFoundException if a build step class is not found - */ - public static Consumer loadStepsFrom(ClassLoader classLoader, LaunchMode launchMode, - Consumer configCustomizer) - throws IOException, ClassNotFoundException { - return loadStepsFrom(classLoader, new Properties(), launchMode, configCustomizer); - } - - /** - * Load all the build steps from the given class loader. - * - * @param classLoader the class loader - * @param buildSystemProps the build system properties to use - * @return a consumer which adds the steps to the given chain builder - * @throws IOException if the class loader could not load a resource - * @throws ClassNotFoundException if a build step class is not found - */ - public static Consumer loadStepsFrom(ClassLoader classLoader, Properties buildSystemProps) - throws IOException, ClassNotFoundException { - return loadStepsFrom(classLoader, buildSystemProps, LaunchMode.NORMAL, null); - } - /** * Load all the build steps from the given class loader. * * @param classLoader the class loader * @param buildSystemProps the build system properties to use + * @param platformProperties Quarkus platform properties + * @param launchMode launch mode + * @param configCustomizer configuration customizer * @return a consumer which adds the steps to the given chain builder * @throws IOException if the class loader could not load a resource * @throws ClassNotFoundException if a build step class is not found */ public static Consumer loadStepsFrom(ClassLoader classLoader, Properties buildSystemProps, - LaunchMode launchMode, Consumer configCustomizer) + Map platformProperties, LaunchMode launchMode, DevModeType devModeType, + Consumer configCustomizer) throws IOException, ClassNotFoundException { - // populate with all known types List> roots = new ArrayList<>(); for (Class clazz : ServiceUtil.classesNamedIn(classLoader, CONFIG_ROOTS_LIST)) { @@ -183,8 +150,19 @@ public static Consumer loadStepsFrom(ClassLoader classLoader, final DefaultValuesConfigurationSource ds2 = new DefaultValuesConfigurationSource( reader.getBuildTimeRunTimePatternMap()); final PropertiesConfigSource pcs = new PropertiesConfigSource(buildSystemProps, "Build system"); - - builder.withSources(ds1, ds2, pcs); + if (platformProperties.isEmpty()) { + builder.withSources(ds1, ds2, pcs); + } else { + final KeyMap props = new KeyMap<>(platformProperties.size()); + for (Map.Entry prop : platformProperties.entrySet()) { + props.findOrAdd(new NameIterator(prop.getKey())).putRootValue(prop.getValue()); + } + final KeyMapBackedConfigSource platformConfigSource = new KeyMapBackedConfigSource("Quarkus platform", + // Our default value configuration source is using an ordinal of Integer.MIN_VALUE + // (see io.quarkus.deployment.configuration.DefaultValuesConfigurationSource) + Integer.MIN_VALUE + 1000, props); + builder.withSources(ds1, ds2, platformConfigSource, pcs); + } if (configCustomizer != null) { configCustomizer.accept(builder); @@ -208,7 +186,7 @@ public static Consumer loadStepsFrom(ClassLoader classLoader, for (Class clazz : ServiceUtil.classesNamedIn(classLoader, "META-INF/quarkus-build-steps.list")) { try { result = result.andThen( - ExtensionLoader.loadStepsFrom(clazz, readResult, proxies, launchMode)); + ExtensionLoader.loadStepsFromClass(clazz, readResult, proxies, 
launchMode, devModeType)); } catch (Throwable e) { throw new RuntimeException("Failed to load steps from " + clazz, e); } @@ -248,11 +226,15 @@ public boolean canHandleObject(final Object obj, final boolean staticInit) { * @param launchMode the launch mode * @return a consumer which adds the steps to the given chain builder */ - public static Consumer loadStepsFrom(Class clazz, BuildTimeConfigurationReader.ReadResult readResult, - Map, Object> runTimeProxies, final LaunchMode launchMode) { + private static Consumer loadStepsFromClass(Class clazz, + BuildTimeConfigurationReader.ReadResult readResult, + Map, Object> runTimeProxies, final LaunchMode launchMode, DevModeType devModeType) { final Constructor[] constructors = clazz.getDeclaredConstructors(); // this is the chain configuration that will contain all steps on this class and be returned Consumer chainConfig = Functions.discardingConsumer(); + if (Modifier.isAbstract(clazz.getModifiers())) { + return chainConfig; + } // this is the step configuration that applies to all steps on this class Consumer stepConfig = Functions.discardingConsumer(); // this is the build step instance setup that applies to all steps on this class @@ -290,6 +272,7 @@ public static Consumer loadStepsFrom(Class clazz, BuildTim stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass)); ctorParamFns.add(bc -> bc.consumeMulti(buildItemClass)); } else if (isConsumerOf(parameterType, BuildItem.class)) { + deprecatedProducer(parameter); final Class buildItemClass = rawTypeOfParameter(parameterType, 0) .asSubclass(BuildItem.class); if (overridable) { @@ -308,6 +291,7 @@ public static Consumer loadStepsFrom(Class clazz, BuildTim } ctorParamFns.add(bc -> (Consumer) bc::produce); } else if (isBuildProducerOf(parameterType, BuildItem.class)) { + deprecatedProducer(parameter); final Class buildItemClass = rawTypeOfParameter(parameterType, 0) .asSubclass(BuildItem.class); if (overridable) { @@ -399,6 +383,7 @@ public static Consumer loadStepsFrom(Class clazz, BuildTim stepInstanceSetup = stepInstanceSetup .andThen((bc, o) -> ReflectUtil.setFieldVal(field, o, bc.consumeMulti(buildItemClass))); } else if (isConsumerOf(fieldType, BuildItem.class)) { + deprecatedProducer(field); final Class buildItemClass = rawTypeOfParameter(fieldType, 0).asSubclass(BuildItem.class); if (overridable) { if (weak) { @@ -417,6 +402,7 @@ public static Consumer loadStepsFrom(Class clazz, BuildTim stepInstanceSetup = stepInstanceSetup .andThen((bc, o) -> ReflectUtil.setFieldVal(field, o, (Consumer) bc::produce)); } else if (isBuildProducerOf(fieldType, BuildItem.class)) { + deprecatedProducer(field); final Class buildItemClass = rawTypeOfParameter(fieldType, 0).asSubclass(BuildItem.class); if (overridable) { if (weak) { @@ -482,7 +468,7 @@ public static Consumer loadStepsFrom(Class clazz, BuildTim } // now iterate the methods - final Method[] methods = clazz.getDeclaredMethods(); + final List methods = getMethods(clazz); for (Method method : methods) { final int mods = method.getModifiers(); if (Modifier.isStatic(mods)) { @@ -535,6 +521,8 @@ public static Consumer loadStepsFrom(Class clazz, BuildTim final Class parameterClass = parameter.getType(); if (parameterClass == LaunchMode.class) { paramSuppList.add(() -> launchMode); + } else if (parameterClass == DevModeType.class) { + paramSuppList.add(() -> devModeType); } else if (parameterClass.isAnnotationPresent(ConfigRoot.class)) { final ConfigRoot annotation = parameterClass.getAnnotation(ConfigRoot.class); final ConfigPhase phase = 
annotation.phase(); @@ -963,6 +951,21 @@ public String toString() { return chainConfig; } + private static void deprecatedProducer(final Object element) { + loadLog.warnf( + "Producing values from constructors and fields is no longer supported and will be removed in a future release: %s", + element); + } + + protected static List getMethods(Class clazz) { + List declaredMethods = new ArrayList<>(); + if (!clazz.getName().equals(Object.class.getName())) { + declaredMethods.addAll(getMethods(clazz.getSuperclass())); + declaredMethods.addAll(asList(clazz.getDeclaredMethods())); + } + return declaredMethods; + } + private static BooleanSupplier and(BooleanSupplier a, BooleanSupplier b) { return () -> a.getAsBoolean() && b.getAsBoolean(); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/Feature.java b/core/deployment/src/main/java/io/quarkus/deployment/Feature.java index 56bab5e6a90d5..b953e83109000 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/Feature.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/Feature.java @@ -12,6 +12,7 @@ public enum Feature { AGROAL, AMAZON_DYNAMODB, AMAZON_LAMBDA, + AMAZON_IAM, AMAZON_S3, AMAZON_SNS, AMAZON_SQS, @@ -35,6 +36,7 @@ public enum Feature { HIBERNATE_ORM_PANACHE_KOTLIN, HIBERNATE_ORM_REST_DATA_PANACHE, HIBERNATE_REACTIVE, + HIBERNATE_REACTIVE_PANACHE, HIBERNATE_SEARCH_ELASTICSEARCH, HIBERNATE_VALIDATOR, INFINISPAN_CLIENT, @@ -57,29 +59,44 @@ public enum Feature { LIQUIBASE, LOGGING_GELF, MAILER, + MICROMETER, MONGODB_CLIENT, MONGODB_PANACHE, + MONGODB_PANACHE_KOTLIN, + MONGODB_REST_DATA_PANACHE, MUTINY, NARAYANA_JTA, NARAYANA_STM, - REACTIVE_PG_CLIENT, - REACTIVE_MYSQL_CLIENT, - REACTIVE_DB2_CLIENT, NEO4J, OIDC, + OIDC_CLIENT, + OIDC_CLIENT_FILTER, + OIDC_TOKEN_PROPAGATION, + OPENSHIFT_CLIENT, PICOCLI, + QUARTZ, QUTE, + REACTIVE_PG_CLIENT, + REACTIVE_MYSQL_CLIENT, + REACTIVE_DB2_CLIENT, REDIS_CLIENT, RESTEASY, RESTEASY_JACKSON, RESTEASY_JAXB, RESTEASY_JSONB, + RESTEASY_MULTIPART, RESTEASY_MUTINY, RESTEASY_QUTE, + RESTEASY_REACTIVE, + RESTEASY_REACTIVE_QUTE, + RESTEASY_REACTIVE_JAXRS_CLIENT, + RESTEASY_REACTIVE_JSONB, + RESTEASY_REACTIVE_JACKSON, REST_CLIENT, REST_CLIENT_JACKSON, REST_CLIENT_JAXB, REST_CLIENT_JSONB, + REST_CLIENT_MUTINY, SCALA, SCHEDULER, SECURITY, @@ -106,6 +123,7 @@ public enum Feature { SPRING_DI, SPRING_WEB, SPRING_DATA_JPA, + SPRING_DATA_REST, SPRING_SECURITY, SPRING_BOOT_PROPERTIES, SPRING_CACHE, @@ -113,7 +131,7 @@ public enum Feature { SPRING_SCHEDULED, SWAGGER_UI, TIKA, - UNDERTOW_WEBSOCKETS, + WEBSOCKETS, VAULT, VERTX, VERTX_WEB, diff --git a/core/deployment/src/main/java/io/quarkus/deployment/IsNormalNotRemoteDev.java b/core/deployment/src/main/java/io/quarkus/deployment/IsNormalNotRemoteDev.java new file mode 100644 index 0000000000000..516ec900776d0 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/IsNormalNotRemoteDev.java @@ -0,0 +1,27 @@ +package io.quarkus.deployment; + +import java.util.function.BooleanSupplier; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.dev.spi.DevModeType; +import io.quarkus.runtime.LaunchMode; + +/** + * boolean supplier that returns true if the application is running in normal + * mode, but is not a remote dev client. 
Intended for use with {@link BuildStep#onlyIf()} + */ +public class IsNormalNotRemoteDev implements BooleanSupplier { + + private final LaunchMode launchMode; + private final DevModeType devModeType; + + public IsNormalNotRemoteDev(LaunchMode launchMode, DevModeType devModeType) { + this.launchMode = launchMode; + this.devModeType = devModeType; + } + + @Override + public boolean getAsBoolean() { + return launchMode == LaunchMode.NORMAL && devModeType == null; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/IsRemoteDevClient.java b/core/deployment/src/main/java/io/quarkus/deployment/IsRemoteDevClient.java new file mode 100644 index 0000000000000..0e7ef32fb100a --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/IsRemoteDevClient.java @@ -0,0 +1,24 @@ +package io.quarkus.deployment; + +import java.util.function.BooleanSupplier; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.dev.spi.DevModeType; + +/** + * boolean supplier that returns true if the application is the local side + * of remote dev mode. Intended for use with {@link BuildStep#onlyIf()} + */ +public class IsRemoteDevClient implements BooleanSupplier { + + private final DevModeType devModeType; + + public IsRemoteDevClient(DevModeType devModeType) { + this.devModeType = devModeType; + } + + @Override + public boolean getAsBoolean() { + return devModeType == DevModeType.REMOTE_LOCAL_SIDE; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/QuarkusAugmentor.java b/core/deployment/src/main/java/io/quarkus/deployment/QuarkusAugmentor.java index 2546e96ad42d8..09882de1382ad 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/QuarkusAugmentor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/QuarkusAugmentor.java @@ -7,6 +7,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.function.Consumer; @@ -35,8 +36,9 @@ import io.quarkus.deployment.pkg.builditem.BuildSystemTargetBuildItem; import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; import io.quarkus.deployment.pkg.builditem.DeploymentResultBuildItem; -import io.quarkus.deployment.util.JavaVersionUtil; +import io.quarkus.dev.spi.DevModeType; import io.quarkus.runtime.LaunchMode; +import io.quarkus.runtime.util.JavaVersionUtil; public class QuarkusAugmentor { @@ -48,6 +50,7 @@ public class QuarkusAugmentor { private final Set> finalResults; private final List> buildChainCustomizers; private final LaunchMode launchMode; + private final DevModeType devModeType; private final List additionalApplicationArchives; private final Collection excludedFromIndexing; private final LiveReloadBuildItem liveReloadBuildItem; @@ -74,6 +77,7 @@ public class QuarkusAugmentor { this.configCustomizer = builder.configCustomizer; this.deploymentClassLoader = builder.deploymentClassLoader; this.rebuild = builder.rebuild; + this.devModeType = builder.devModeType; } public BuildResult run() throws Exception { @@ -94,12 +98,11 @@ public BuildResult run() throws Exception { //TODO: we load everything from the deployment class loader //this allows the deployment config (application.properties) to be loaded, but in theory could result //in additional stuff from the deployment leaking in, this is unlikely but has a bit of a smell. 
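
(Editor's aside, not part of the patch: the new `IsNormalNotRemoteDev` and `IsRemoteDevClient` suppliers introduced above are meant to be plugged into `@BuildStep(onlyIf = ...)`. A minimal sketch of that usage follows; the processor class and the "my-extension" feature name are hypothetical illustrations, while `IsNormalNotRemoteDev`, `@BuildStep` and `FeatureBuildItem` are existing Quarkus deployment API types.)

```java
// Hypothetical extension processor showing how the new DevModeType-aware
// BooleanSupplier might gate a build step; MyExtensionProcessor and the
// "my-extension" feature name are illustrative, not part of this diff.
import io.quarkus.deployment.IsNormalNotRemoteDev;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.FeatureBuildItem;

public class MyExtensionProcessor {

    // Only executed for a normal build that is not the local side of remote dev mode.
    @BuildStep(onlyIf = IsNormalNotRemoteDev.class)
    FeatureBuildItem feature() {
        return new FeatureBuildItem("my-extension");
    }
}
```

(The supplier's `LaunchMode`/`DevModeType` constructor arguments are presumably injected by the extension loader when it evaluates `onlyIf` conditions, mirroring the `DevModeType` handling added to `ExtensionLoader` in this patch.)
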
- if (buildSystemProperties != null) { - ExtensionLoader.loadStepsFrom(deploymentClassLoader, buildSystemProperties, launchMode, configCustomizer) - .accept(chainBuilder); - } else { - ExtensionLoader.loadStepsFrom(deploymentClassLoader, launchMode, configCustomizer).accept(chainBuilder); - } + ExtensionLoader.loadStepsFrom(deploymentClassLoader, + buildSystemProperties == null ? new Properties() : buildSystemProperties, + effectiveModel.getPlatformProperties(), launchMode, devModeType, configCustomizer) + .accept(chainBuilder); + Thread.currentThread().setContextClassLoader(classLoader); chainBuilder.loadProviders(classLoader); @@ -138,7 +141,8 @@ public BuildResult run() throws Exception { .produce(rootBuilder.build(buildCloseables)) .produce(new ShutdownContextBuildItem()) .produce(new RawCommandLineArgumentsBuildItem()) - .produce(new LaunchModeBuildItem(launchMode)) + .produce(new LaunchModeBuildItem(launchMode, + devModeType == null ? Optional.empty() : Optional.of(devModeType))) .produce(new BuildSystemTargetBuildItem(targetDir, baseName, rebuild, buildSystemProperties == null ? new Properties() : buildSystemProperties)) .produce(new DeploymentClassLoaderBuildItem(deploymentClassLoader)) @@ -192,6 +196,7 @@ public static final class Builder { String baseName = "quarkus-application"; Consumer configCustomizer; ClassLoader deploymentClassLoader; + DevModeType devModeType; public Builder addBuildChainCustomizer(Consumer customizer) { this.buildChainCustomizers.add(customizer); @@ -221,6 +226,15 @@ public Builder setLaunchMode(LaunchMode launchMode) { return this; } + public DevModeType getDevModeType() { + return devModeType; + } + + public Builder setDevModeType(DevModeType devModeType) { + this.devModeType = devModeType; + return this; + } + public ClassLoader getClassLoader() { return classLoader; } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/SslProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/SslProcessor.java index 4448a9223e7b6..0ccc9fa0812bd 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/SslProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/SslProcessor.java @@ -39,7 +39,7 @@ void runtime(BuildProducer reinitialized) { private void registerIfExists(BuildProducer reinitialized, String className) { try { - Class.forName(className); + Class.forName(className, false, Thread.currentThread().getContextClassLoader()); reinitialized.produce(new RuntimeReinitializedClassBuildItem(className)); } catch (ClassNotFoundException ignored) { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/TestConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/TestConfig.java index 63a9b3955392f..b619578c833a3 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/TestConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/TestConfig.java @@ -1,7 +1,10 @@ package io.quarkus.deployment; import java.time.Duration; +import java.util.List; +import java.util.Optional; +import io.quarkus.runtime.annotations.ConfigGroup; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; @@ -27,8 +30,27 @@ public class TestConfig { String nativeImageProfile; /** - * The profile to use when testing using @QuarkusTest + * Profile related test settings */ - @ConfigItem(defaultValue = "test") - String profile; + @ConfigItem + Profile profile; + + @ConfigGroup + public static class Profile { + + /** + * The profile (dev, test or prod) to use when testing 
using @QuarkusTest + */ + @ConfigItem(name = ConfigItem.PARENT, defaultValue = "test") + String profile; + + /** + * The tags this profile is associated with. + * When the {@code quarkus.test.profile.tags} System property is set (its value is a comma separated list of strings) + * then Quarkus will only execute tests that are annotated with a {@code @TestProfile} that has at least one of the + * supplied (via the aforementioned system property) tags. + */ + @ConfigItem(defaultValue = "") + Optional> tags; + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/AdditionalClassLoaderResourcesBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/AdditionalClassLoaderResourcesBuildItem.java new file mode 100644 index 0000000000000..7a5ee94f3b42c --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/AdditionalClassLoaderResourcesBuildItem.java @@ -0,0 +1,19 @@ +package io.quarkus.deployment.builditem; + +import java.util.Map; + +import io.quarkus.builder.item.MultiBuildItem; + +public final class AdditionalClassLoaderResourcesBuildItem extends MultiBuildItem { + + final Map resources; + + public AdditionalClassLoaderResourcesBuildItem(Map resources) { + this.resources = resources; + } + + public Map getResources() { + return resources; + } + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/BytecodeTransformerBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/BytecodeTransformerBuildItem.java index 390548ecf653c..966114f1b8609 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/BytecodeTransformerBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/BytecodeTransformerBuildItem.java @@ -1,5 +1,6 @@ package io.quarkus.deployment.builditem; +import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; @@ -12,24 +13,33 @@ public final class BytecodeTransformerBuildItem extends MultiBuildItem { /** * If this is true it means the class should be loaded eagerly by a thread pool in dev mode * on multi threaded systems. - * + *
<p>
* Transformation is expensive, so doing it this way can speed up boot time. */ final boolean eager; final String classToTransform; final BiFunction visitorFunction; + /** + * Function that can be applied to the input bytes before they are passed into ASM. This should only be used + * in very specific circumstances. At the moment the only known valid use case is jacoco, which needs + * access to the unmodified class file bytes. + */ + final BiFunction inputTransformer; + /** * A set of class names that need to be present in the const pool for the transformation to happen. These * need to be in JVM internal format. - *
+ * <p>
* The transformation is only applied if at least one of the entries in the const pool is present - * + *
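Editor's note on this hunk: the Builder added further down in this file is the intended entry point for the new cacheable and inputTransformer options. Below is a hedged sketch of how an extension processor might use it; the processor, entity and constant-pool marker names are invented for illustration, and the (className, visitor) shape of the visitor function is inferred from the surrounding declarations rather than spelled out in this excerpt.

```java
// Illustrative sketch only; names are hypothetical.
import java.util.Collections;

import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Opcodes;

import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem;

public class MyTransformerProcessor {

    @BuildStep
    void transformEntity(BuildProducer<BytecodeTransformerBuildItem> transformers) {
        transformers.produce(new BytecodeTransformerBuildItem.Builder()
                .setClassToTransform("org.acme.MyEntity")
                // only bother transforming if the class actually references this marker type
                .setRequireConstPoolEntry(Collections.singleton("org/acme/Marker"))
                // marks the transformation result as safe to cache
                .setCacheable(true)
                .setVisitorFunction((className, visitor) -> new ClassVisitor(Opcodes.ASM8, visitor) {
                    // override visitMethod/visitField here to rewrite the bytecode
                })
                .build());
    }
}
```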
<p>
* Note that this is an optimisation, and if another transformer is transforming the class anyway then * this transformer will always be applied. */ final Set requireConstPoolEntry; + final boolean cacheable; + public BytecodeTransformerBuildItem(String classToTransform, BiFunction visitorFunction) { this(classToTransform, visitorFunction, null); @@ -45,12 +55,38 @@ public BytecodeTransformerBuildItem(boolean eager, String classToTransform, this(eager, classToTransform, visitorFunction, null); } + public BytecodeTransformerBuildItem(boolean eager, String classToTransform, + BiFunction visitorFunction, boolean cacheable) { + this(eager, classToTransform, visitorFunction, null, cacheable); + } + public BytecodeTransformerBuildItem(boolean eager, String classToTransform, BiFunction visitorFunction, Set requireConstPoolEntry) { + this(eager, classToTransform, visitorFunction, requireConstPoolEntry, false); + } + + public BytecodeTransformerBuildItem(boolean eager, String classToTransform, + BiFunction visitorFunction, Set requireConstPoolEntry, + boolean cacheable) { + Objects.requireNonNull(visitorFunction, "visitorFunction"); this.eager = eager; this.classToTransform = classToTransform; this.visitorFunction = visitorFunction; this.requireConstPoolEntry = requireConstPoolEntry; + this.cacheable = cacheable; + this.inputTransformer = null; + } + + public BytecodeTransformerBuildItem(Builder builder) { + this.eager = builder.eager; + this.classToTransform = builder.classToTransform; + this.visitorFunction = builder.visitorFunction; + this.requireConstPoolEntry = builder.requireConstPoolEntry; + this.cacheable = builder.cacheable; + this.inputTransformer = builder.inputTransformer; + if (visitorFunction == null && inputTransformer == null) { + throw new IllegalArgumentException("One of either visitorFunction or inputTransformer must be set"); + } } public String getClassToTransform() { @@ -68,4 +104,56 @@ public Set getRequireConstPoolEntry() { public boolean isEager() { return eager; } + + public boolean isCacheable() { + return cacheable; + } + + public BiFunction getInputTransformer() { + return inputTransformer; + } + + public static class Builder { + public BiFunction inputTransformer; + private String classToTransform; + private BiFunction visitorFunction; + private Set requireConstPoolEntry = null; + private boolean eager = false; + private boolean cacheable = false; + + public Builder setInputTransformer(BiFunction inputTransformer) { + this.inputTransformer = inputTransformer; + return this; + } + + public Builder setClassToTransform(String classToTransform) { + this.classToTransform = classToTransform; + return this; + } + + public Builder setVisitorFunction(BiFunction visitorFunction) { + this.visitorFunction = visitorFunction; + return this; + } + + public Builder setRequireConstPoolEntry(Set requireConstPoolEntry) { + this.requireConstPoolEntry = requireConstPoolEntry; + return this; + } + + public Builder setEager(boolean eager) { + this.eager = eager; + return this; + } + + public Builder setCacheable(boolean cacheable) { + this.cacheable = cacheable; + return this; + } + + public BytecodeTransformerBuildItem build() { + return new BytecodeTransformerBuildItem(this); + } + } + } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/ChangedClassesBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/ChangedClassesBuildItem.java new file mode 100644 index 0000000000000..1d1bf11f8614e --- /dev/null +++ 
b/core/deployment/src/main/java/io/quarkus/deployment/builditem/ChangedClassesBuildItem.java @@ -0,0 +1,54 @@ +package io.quarkus.deployment.builditem; + +import java.util.Collections; +import java.util.Map; + +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * Represents the differences between classes in a dev mode restart. + * + * This can be used to avoid repeating work on restart, e.g. re-using + * old proxy definitions if nothing has changed for a given class. + * + * This will not always be present, it must be injected as an + * optional dependency. + * + * This will never be generated if the previous restart was a failure + * to avoid issues with inconsistent application state. + */ +public class ChangedClassesBuildItem extends SimpleBuildItem { + + private final Map changedClassesNewVersion; + private final Map changedClassesOldVersion; + private final Map deletedClasses; + private final Map addedClasses; + + public ChangedClassesBuildItem(Map changedClassesNewVersion, + Map changedClassesOldVersion, Map deletedClasses, + Map addedClasses) { + this.changedClassesNewVersion = changedClassesNewVersion; + this.changedClassesOldVersion = changedClassesOldVersion; + this.deletedClasses = deletedClasses; + this.addedClasses = addedClasses; + } + + public Map getChangedClassesNewVersion() { + return Collections.unmodifiableMap(changedClassesNewVersion); + } + + public Map getChangedClassesOldVersion() { + return Collections.unmodifiableMap(changedClassesOldVersion); + } + + public Map getDeletedClasses() { + return Collections.unmodifiableMap(deletedClasses); + } + + public Map getAddedClasses() { + return Collections.unmodifiableMap(addedClasses); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/CombinedIndexBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/CombinedIndexBuildItem.java index ebfe7c3d8a822..fc47bce41240e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/CombinedIndexBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/CombinedIndexBuildItem.java @@ -1,5 +1,6 @@ package io.quarkus.deployment.builditem; +import org.jboss.jandex.DotName; import org.jboss.jandex.IndexView; import io.quarkus.builder.item.SimpleBuildItem; @@ -13,6 +14,10 @@ * Compared to {@code BeanArchiveIndexBuildItem}, this index doesn't contain all CDI-related information. * On the other hand, it can contain classes from archives/dependencies that had no CDI component declared within them. * + * The computing index can also be used to index classes on demand. This when {@link IndexView#getClassByName(DotName)} + * is called. Note that this is a mutable index as this will add additional information, so in general this Index + * should only be used if you actually need it. 
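To make the distinction between the two index views concrete, here is a hedged sketch of a build step that uses both; the processor and the looked-up class names are hypothetical.

```java
// Illustrative sketch only; names are hypothetical.
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;

import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.CombinedIndexBuildItem;

public class MyIndexProcessor {

    @BuildStep
    void inspect(CombinedIndexBuildItem combinedIndex) {
        // the plain index only contains classes that were indexed up front
        ClassInfo indexed = combinedIndex.getIndex()
                .getClassByName(DotName.createSimple("org.acme.MyBean"));

        // the computing index may index a class on demand when it is first looked up,
        // mutating the underlying index, so reach for it only when really needed
        ClassInfo onDemand = combinedIndex.getComputingIndex()
                .getClassByName(DotName.createSimple("com.example.external.SomeType"));
    }
}
```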
+ * * @see AdditionalApplicationArchiveMarkerBuildItem * @see IndexDependencyBuildItem */ @@ -20,11 +25,18 @@ public final class CombinedIndexBuildItem extends SimpleBuildItem { private final IndexView index; - public CombinedIndexBuildItem(IndexView index) { + private final IndexView computingIndex; + + public CombinedIndexBuildItem(IndexView index, IndexView computingIndex) { this.index = index; + this.computingIndex = computingIndex; } public IndexView getIndex() { return index; } + + public IndexView getComputingIndex() { + return computingIndex; + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/GeneratedClassBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/GeneratedClassBuildItem.java index 0d6a49142b71f..00fb49e11a23b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/GeneratedClassBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/GeneratedClassBuildItem.java @@ -14,6 +14,9 @@ public GeneratedClassBuildItem(boolean applicationClass, String name, byte[] cla } public GeneratedClassBuildItem(boolean applicationClass, String name, byte[] classData, String source) { + if (name.startsWith("/")) { + throw new IllegalArgumentException("Name cannot start with '/':" + name); + } this.applicationClass = applicationClass; this.name = name; this.classData = classData; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LaunchModeBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LaunchModeBuildItem.java index 309c36d0185ac..5fbcd30b00e54 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LaunchModeBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LaunchModeBuildItem.java @@ -1,6 +1,9 @@ package io.quarkus.deployment.builditem; +import java.util.Optional; + import io.quarkus.builder.item.SimpleBuildItem; +import io.quarkus.dev.spi.DevModeType; import io.quarkus.runtime.LaunchMode; /** @@ -10,11 +13,24 @@ public final class LaunchModeBuildItem extends SimpleBuildItem { private final LaunchMode launchMode; - public LaunchModeBuildItem(LaunchMode launchMode) { + private final Optional devModeType; + + public LaunchModeBuildItem(LaunchMode launchMode, Optional devModeType) { this.launchMode = launchMode; + this.devModeType = devModeType; } public LaunchMode getLaunchMode() { return launchMode; } + + /** + * The development mode type. + * + * Note that even for NORMAL launch modes this could be generating an application for the local side of remote + * dev mode, so this may be set even for launch mode normal. 
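A hedged sketch of how a build step could take the new accessor into account; the processor name is made up, and only the presence of a DevModeType is checked since the individual enum constants are not part of this excerpt.

```java
// Illustrative sketch only; names are hypothetical.
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.LaunchModeBuildItem;
import io.quarkus.runtime.LaunchMode;

public class MyDevModeAwareProcessor {

    @BuildStep
    void configure(LaunchModeBuildItem launchMode) {
        // LaunchMode alone is not enough: the local side of remote dev mode builds with
        // LaunchMode.NORMAL, so the DevModeType has to be consulted as well
        boolean devModeInvolved = launchMode.getLaunchMode() == LaunchMode.DEVELOPMENT
                || launchMode.getDevModeType().isPresent();
        if (devModeInvolved) {
            // e.g. keep data around for fast restarts instead of optimising for size
        }
    }
}
```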
+ */ + public Optional getDevModeType() { + return devModeType; + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LiveReloadBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LiveReloadBuildItem.java index 6949e615f827b..1d71b04646d94 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LiveReloadBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LiveReloadBuildItem.java @@ -5,6 +5,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import io.quarkus.bootstrap.app.ClassChangeInformation; import io.quarkus.builder.item.SimpleBuildItem; /** @@ -18,6 +19,7 @@ public final class LiveReloadBuildItem extends SimpleBuildItem { private final boolean liveReload; private final Set changedResources; private final Map, Object> reloadContext; + private final ClassChangeInformation changeInformation; /** * This constructor should only be used if live reload is not possible @@ -26,12 +28,15 @@ public LiveReloadBuildItem() { liveReload = false; changedResources = Collections.emptySet(); this.reloadContext = new ConcurrentHashMap<>(); + this.changeInformation = null; } - public LiveReloadBuildItem(boolean liveReload, Set changedResources, Map, Object> reloadContext) { + public LiveReloadBuildItem(boolean liveReload, Set changedResources, Map, Object> reloadContext, + ClassChangeInformation changeInformation) { this.liveReload = liveReload; this.changedResources = changedResources; this.reloadContext = reloadContext; + this.changeInformation = changeInformation; } /** @@ -71,4 +76,15 @@ public T getContextObject(Class type) { public void setContextObject(Class type, T val) { reloadContext.put(type, val); } + + /** + * Returns the change information from the last successful restart. + * + * Will be null if Quarkus has not previously successfully started, or if the + * previous attempt to start was a failure. + * + */ + public ClassChangeInformation getChangeInformation() { + return changeInformation; + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/NativeImageEnableAllTimeZonesBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/NativeImageEnableAllTimeZonesBuildItem.java index 4223eceba4d21..2a624e3ad8efa 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/NativeImageEnableAllTimeZonesBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/NativeImageEnableAllTimeZonesBuildItem.java @@ -2,6 +2,15 @@ import io.quarkus.builder.item.MultiBuildItem; +/** + * All timezones are now included in native executables by default so this build item does not change the build behavior + * anymore. + *
<p>
+ * Keeping it around for now as it marks the extension requiring all the timezones + * and better wait for the situation to fully settle on the GraalVM side + * before removing it entirely. + */ +@Deprecated public final class NativeImageEnableAllTimeZonesBuildItem extends MultiBuildItem { } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/ObjectSubstitutionBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/ObjectSubstitutionBuildItem.java index ef465c6f3a84a..92d3b8602d1d2 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/ObjectSubstitutionBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/ObjectSubstitutionBuildItem.java @@ -30,8 +30,9 @@ public Holder(Class from, Class to, Class> substi public final Holder holder; - public ObjectSubstitutionBuildItem(Class from, Class to, Class> substitution) { - holder = new Holder<>(from, to, substitution); + public ObjectSubstitutionBuildItem(Class from, Class to, + Class> substitution) { + holder = new Holder(from, to, substitution); } public ObjectSubstitutionBuildItem(Holder holder) { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/TransformedClassesBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/TransformedClassesBuildItem.java index edad767ad912a..3f0aa90194fca 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/TransformedClassesBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/TransformedClassesBuildItem.java @@ -36,12 +36,16 @@ public Map> getTransformedFilesByJar() { public static class TransformedClass { + private final String className; private final byte[] data; private final String fileName; + private final boolean eager; - public TransformedClass(byte[] data, String fileName) { + public TransformedClass(String className, byte[] data, String fileName, boolean eager) { + this.className = className; this.data = data; this.fileName = fileName; + this.eager = eager; } public byte[] getData() { @@ -52,6 +56,14 @@ public String getFileName() { return fileName; } + public String getClassName() { + return className; + } + + public boolean isEager() { + return eager; + } + @Override public boolean equals(Object o) { if (this == o) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceBuildItem.java index d2f8346418087..56ecff462a4d0 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceBuildItem.java @@ -8,6 +8,12 @@ /** * A build item that indicates that a static resource should be included in the native image + *
<p>
+ * Related build items:
+ * <ul>
+ * <li>Use {@link NativeImageResourceDirectoryBuildItem} if you need to add a directory of resources</li>
+ * <li>Use {@link NativeImageResourcePatternsBuildItem} to select resource paths by regular expressions or globs</li>
+ * </ul>
*/ public final class NativeImageResourceBuildItem extends MultiBuildItem { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceDirectoryBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceDirectoryBuildItem.java index 60847c404ac56..875e9ecd12745 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceDirectoryBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceDirectoryBuildItem.java @@ -4,6 +4,12 @@ /** * A build item that indicates that directory resources should be included in the native image + *
<p>
+ * Related build items:
+ * <ul>
+ * <li>Use {@link NativeImageResourceBuildItem} if you need to add a single resource</li>
+ * <li>Use {@link NativeImageResourcePatternsBuildItem} to select resource paths by regular expressions or globs</li>
+ * </ul>
*/ public final class NativeImageResourceDirectoryBuildItem extends MultiBuildItem { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourcePatternsBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourcePatternsBuildItem.java new file mode 100644 index 0000000000000..2b062767950af --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourcePatternsBuildItem.java @@ -0,0 +1,236 @@ +package io.quarkus.deployment.builditem.nativeimage; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +import io.quarkus.builder.item.MultiBuildItem; +import io.quarkus.deployment.pkg.NativeConfig; +import io.quarkus.deployment.util.GlobUtil; + +/** + * A build item that indicates that a set of resource paths defined by regular expression patterns or globs should be + * included in the native image. + *
<p>
+ * Globs passed to the {@code includeGlob*()} methods of the {@link Builder} are transformed to regular expressions + * internally. See {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + *
<p>
+ * The patterns are passed to the native image builder using the {@code com.oracle.svm.hosted.ResourcesFeature} API. + * The same mechanism (and regular expression syntax) is used by {@code native-image}'s + * {@code -H:ResourceConfigurationFiles}, {@code -H:IncludeResources} and {@code -H:ExcludeResources} (since + * GraalVM 20.3.0) command line options. + *
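A small sketch of an extension producing this build item from a build step; the glob values and processor name are purely illustrative.

```java
// Illustrative sketch only; names and globs are hypothetical.
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.nativeimage.NativeImageResourcePatternsBuildItem;

public class MyResourceProcessor {

    @BuildStep
    NativeImageResourcePatternsBuildItem nativeResources() {
        // globs are translated to the regular expressions understood by native-image
        return NativeImageResourcePatternsBuildItem.builder()
                .includeGlobs("templates/**", "config/*.yaml")
                .excludeGlob("templates/**/*.draft")
                .build();
    }
}
```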
<p>
+ * Related build items:
+ * <ul>
+ * <li>Use {@link NativeImageResourceBuildItem} if you need to add a single resource</li>
+ * <li>Use {@link NativeImageResourceDirectoryBuildItem} if you need to add a directory of resources</li>
+ * </ul>
+ */ +public final class NativeImageResourcePatternsBuildItem extends MultiBuildItem { + + private final List excludePatterns; + + private final List includePatterns; + + private NativeImageResourcePatternsBuildItem(List includePatterns, List excludePatterns) { + this.includePatterns = includePatterns; + this.excludePatterns = excludePatterns; + } + + public List getExcludePatterns() { + return excludePatterns; + } + + public List getIncludePatterns() { + return includePatterns; + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private List excludePatterns = new ArrayList<>(); + private List includePatterns = new ArrayList<>(); + + public NativeImageResourcePatternsBuildItem build() { + final List incl = includePatterns; + includePatterns = null; + final List excl = excludePatterns; + excludePatterns = null; + return new NativeImageResourcePatternsBuildItem(Collections.unmodifiableList(incl), + Collections.unmodifiableList(excl)); + } + + /** + * Add a glob pattern for matching resource paths that should not be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash. See + * {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + * + * @param glob the glob pattern to add to the list of patterns to exclude + * @return this {@link Builder} + */ + public Builder excludeGlob(String glob) { + excludePatterns.add(GlobUtil.toRegexPattern(glob)); + return this; + } + + /** + * Add a collection of glob patterns for matching resource paths that should not be added to the + * native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash. See + * {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + * + * @param globs the glob patterns to add to the list of patterns to exclude + * @return this {@link Builder} + */ + public Builder excludeGlobs(Collection globs) { + globs.stream().map(GlobUtil::toRegexPattern).forEach(excludePatterns::add); + return this; + } + + /** + * Add an array of glob patterns for matching resource paths that should not be added to the + * native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash. See + * {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + * + * @param globs the glob patterns to add to the list of patterns to exclude + * @return this {@link Builder} + */ + public Builder excludeGlobs(String... globs) { + Stream.of(globs).map(GlobUtil::toRegexPattern).forEach(excludePatterns::add); + return this; + } + + /** + * Add a regular expression for matching resource paths that should not be added to the native + * image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. The pattern must not start with slash. + * + * @param pattern the regular expression to add to the list of patterns to exclude + * @return this {@link Builder} + */ + public Builder excludePattern(String pattern) { + excludePatterns.add(pattern); + return this; + } + + /** + * Add a collection of regular expressions for matching resource paths that should not be added + * to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. The pattern must not start with slash. + * + * @param patterns the regular expressions to add to the list of patterns to exclude + * @return this {@link Builder} + */ + public Builder excludePatterns(Collection patterns) { + excludePatterns.addAll(patterns); + return this; + } + + /** + * Add an array of regular expressions for matching resource paths that should not be added + * to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. The pattern must not start with slash. + * + * @param patterns the regular expressions to add to the list of patterns to exclude + * @return this {@link Builder} + */ + public Builder excludePatterns(String... patterns) { + Stream.of(patterns).forEach(excludePatterns::add); + return this; + } + + /** + * Add a glob pattern for matching resource paths that should be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash. See + * {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + * + * @param glob the glob pattern to add + * @return this {@link Builder} + */ + public Builder includeGlob(String glob) { + includePatterns.add(GlobUtil.toRegexPattern(glob)); + return this; + } + + /** + * Add a collection of glob patterns for matching resource paths that should be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash. See + * {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + * + * @param globs the glob patterns to add + * @return this {@link Builder} + */ + public Builder includeGlobs(Collection globs) { + globs.stream().map(GlobUtil::toRegexPattern).forEach(includePatterns::add); + return this; + } + + /** + * Add an array of glob patterns for matching resource paths that should be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash. See + * {@link NativeConfig.ResourcesConfig#includes} for the supported glob syntax. + * + * @param globs the glob patterns to add + * @return this {@link Builder} + */ + public Builder includeGlobs(String... patterns) { + Stream.of(patterns).map(GlobUtil::toRegexPattern).forEach(includePatterns::add); + return this; + } + + /** + * Add a regular expression for matching resource paths that should be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. The pattern must not start with slash. + * + * @param pattern the regular expression to add + * @return this {@link Builder} + */ + public Builder includePattern(String pattern) { + includePatterns.add(pattern); + return this; + } + + /** + * Add a collection of regular expressions for matching resource paths that should be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. The patterns must not start with slash. + * + * @param patterns the regular expressions to add + * @return this {@link Builder} + */ + public Builder includePatterns(Collection patterns) { + includePatterns.addAll(patterns); + return this; + } + + /** + * Add an array of regular expressions for matching resource paths that should be added to the native image. + *
<p>
+ * Use slash ({@code /}) as a path separator on all platforms. The patterns must not start with slash. + * + * @param patterns the regular expressions to add + * @return this {@link Builder} + */ + public Builder includePatterns(String... patterns) { + Stream.of(patterns).forEach(includePatterns::add); + return this; + } + + } + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java index 3fd15dfa0be17..fa5740182743a 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java @@ -7,7 +7,9 @@ import java.util.function.Predicate; import org.jboss.jandex.DotName; +import org.jboss.jandex.FieldInfo; import org.jboss.jandex.IndexView; +import org.jboss.jandex.MethodInfo; import org.jboss.jandex.Type; import io.quarkus.builder.item.MultiBuildItem; @@ -31,43 +33,87 @@ */ public final class ReflectiveHierarchyBuildItem extends MultiBuildItem { + private static final String UNKNOWN_SOURCE = ""; + private final Type type; private final IndexView index; - private final Predicate ignorePredicate; + private final Predicate ignoreTypePredicate; + private final Predicate ignoreFieldPredicate; + private final Predicate ignoreMethodPredicate; private final String source; + /** + * @deprecated Use the Builder instead. + */ + @Deprecated public ReflectiveHierarchyBuildItem(Type type) { - this(type, DefaultIgnorePredicate.INSTANCE); + this(type, DefaultIgnoreTypePredicate.INSTANCE); } + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. + */ + @Deprecated public ReflectiveHierarchyBuildItem(Type type, IndexView index) { - this(type, index, DefaultIgnorePredicate.INSTANCE); + this(type, index, DefaultIgnoreTypePredicate.INSTANCE); } - public ReflectiveHierarchyBuildItem(Type type, Predicate ignorePredicate) { - this(type, ignorePredicate, null); + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. + */ + @Deprecated + public ReflectiveHierarchyBuildItem(Type type, Predicate ignoreTypePredicate) { + this(type, ignoreTypePredicate, UNKNOWN_SOURCE); } - public ReflectiveHierarchyBuildItem(Type type, IndexView index, Predicate ignorePredicate) { - this(type, index, ignorePredicate, null); + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. + */ + @Deprecated + public ReflectiveHierarchyBuildItem(Type type, IndexView index, Predicate ignoreTypePredicate) { + this(type, index, ignoreTypePredicate, UNKNOWN_SOURCE); } + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. + */ + @Deprecated public ReflectiveHierarchyBuildItem(Type type, String source) { - this(type, DefaultIgnorePredicate.INSTANCE, source); + this(type, DefaultIgnoreTypePredicate.INSTANCE, source); } + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. + */ + @Deprecated public ReflectiveHierarchyBuildItem(Type type, IndexView index, String source) { - this(type, index, DefaultIgnorePredicate.INSTANCE, source); + this(type, index, DefaultIgnoreTypePredicate.INSTANCE, source); + } + + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. 
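Since every constructor above is being deprecated in favour of the Builder, a short sketch of the replacement form may help; the DTO and processor names are hypothetical, and the source string is just one reasonable convention.

```java
// Illustrative sketch only; names are hypothetical.
import org.jboss.jandex.DotName;
import org.jboss.jandex.Type;

import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.CombinedIndexBuildItem;
import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyBuildItem;

public class MyReflectionProcessor {

    @BuildStep
    void register(CombinedIndexBuildItem combinedIndex,
            BuildProducer<ReflectiveHierarchyBuildItem> reflectiveHierarchy) {
        Type dto = Type.create(DotName.createSimple("org.acme.MyDto"), Type.Kind.CLASS);
        reflectiveHierarchy.produce(new ReflectiveHierarchyBuildItem.Builder()
                .type(dto)
                .index(combinedIndex.getIndex())
                // the source shows up in diagnostics, which is the point of deprecating the source-less constructors
                .source(MyReflectionProcessor.class.getSimpleName() + " > " + dto.name())
                .build());
    }
}
```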
+ */ + @Deprecated + public ReflectiveHierarchyBuildItem(Type type, Predicate ignoreTypePredicate, String source) { + this(type, null, ignoreTypePredicate, source); } - public ReflectiveHierarchyBuildItem(Type type, Predicate ignorePredicate, String source) { - this(type, null, ignorePredicate, source); + /** + * @deprecated Use the Builder instead and provide a source for easy debugging. + */ + @Deprecated + public ReflectiveHierarchyBuildItem(Type type, IndexView index, Predicate ignoreTypePredicate, String source) { + this(type, index, ignoreTypePredicate, DefaultIgnoreFieldPredicate.INSTANCE, DefaultIgnoreMethodPredicate.INSTANCE, + source); } - public ReflectiveHierarchyBuildItem(Type type, IndexView index, Predicate ignorePredicate, String source) { + private ReflectiveHierarchyBuildItem(Type type, IndexView index, Predicate ignoreTypePredicate, + Predicate ignoreFieldPredicate, Predicate ignoreMethodPredicate, String source) { this.type = type; this.index = index; - this.ignorePredicate = ignorePredicate; + this.ignoreTypePredicate = ignoreTypePredicate; + this.ignoreFieldPredicate = ignoreFieldPredicate; + this.ignoreMethodPredicate = ignoreMethodPredicate; this.source = source; } @@ -79,8 +125,16 @@ public IndexView getIndex() { return index; } - public Predicate getIgnorePredicate() { - return ignorePredicate; + public Predicate getIgnoreTypePredicate() { + return ignoreTypePredicate; + } + + public Predicate getIgnoreFieldPredicate() { + return ignoreFieldPredicate; + } + + public Predicate getIgnoreMethodPredicate() { + return ignoreMethodPredicate; } public boolean hasSource() { @@ -91,12 +145,58 @@ public String getSource() { return source; } - public static class DefaultIgnorePredicate implements Predicate { + public static class Builder { + + private Type type; + private IndexView index; + private Predicate ignoreTypePredicate = DefaultIgnoreTypePredicate.INSTANCE; + private Predicate ignoreFieldPredicate = DefaultIgnoreFieldPredicate.INSTANCE; + private Predicate ignoreMethodPredicate = DefaultIgnoreMethodPredicate.INSTANCE; + private String source = UNKNOWN_SOURCE; + + public Builder type(Type type) { + this.type = type; + return this; + } + + public Builder index(IndexView index) { + this.index = index; + return this; + } + + public Builder ignoreTypePredicate(Predicate ignoreTypePredicate) { + this.ignoreTypePredicate = ignoreTypePredicate; + return this; + } + + public Builder ignoreFieldPredicate(Predicate ignoreFieldPredicate) { + this.ignoreFieldPredicate = ignoreFieldPredicate; + return this; + } + + public Builder ignoreMethodPredicate(Predicate ignoreMethodPredicate) { + this.ignoreMethodPredicate = ignoreMethodPredicate; + return this; + } + + public Builder source(String source) { + this.source = source; + return this; + } + + public ReflectiveHierarchyBuildItem build() { + return new ReflectiveHierarchyBuildItem(type, index, ignoreTypePredicate, ignoreFieldPredicate, + ignoreMethodPredicate, source); + } + } + + public static class DefaultIgnoreTypePredicate implements Predicate { - public static final DefaultIgnorePredicate INSTANCE = new DefaultIgnorePredicate(); + public static final DefaultIgnoreTypePredicate INSTANCE = new DefaultIgnoreTypePredicate(); private static final List DEFAULT_IGNORED_PACKAGES = Arrays.asList("java.", "io.reactivex.", - "org.reactivestreams.", "org.slf4j."); + "org.reactivestreams.", "org.slf4j.", "javax.json.", "com.fasterxml.jackson.databind.", + "io.vertx.core.json."); // if this gets more complicated we will need to move to 
some tree like structure static final Set WHITELISTED_FROM_IGNORED_PACKAGES = new HashSet<>( Arrays.asList("java.math.BigDecimal", "java.math.BigInteger")); @@ -126,7 +226,27 @@ public static class IgnoreWhiteListedPredicate implements Predicate { @Override public boolean test(DotName dotName) { - return DefaultIgnorePredicate.WHITELISTED_FROM_IGNORED_PACKAGES.contains(dotName.toString()); + return DefaultIgnoreTypePredicate.WHITELISTED_FROM_IGNORED_PACKAGES.contains(dotName.toString()); + } + } + + public static class DefaultIgnoreFieldPredicate implements Predicate { + + public static DefaultIgnoreFieldPredicate INSTANCE = new DefaultIgnoreFieldPredicate(); + + @Override + public boolean test(FieldInfo fieldInfo) { + return false; + } + } + + public static class DefaultIgnoreMethodPredicate implements Predicate { + + public static DefaultIgnoreMethodPredicate INSTANCE = new DefaultIgnoreMethodPredicate(); + + @Override + public boolean test(MethodInfo methodInfo) { + return false; } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedClassBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedClassBuildItem.java index 61ba6bfeb7f70..484dec11f422c 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedClassBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedClassBuildItem.java @@ -2,6 +2,9 @@ import io.quarkus.builder.item.MultiBuildItem; +/** + * A class that will be initialized at runtime in native mode. + */ public final class RuntimeInitializedClassBuildItem extends MultiBuildItem { private final String className; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedPackageBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedPackageBuildItem.java new file mode 100644 index 0000000000000..25e9ccdcf7fb6 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeInitializedPackageBuildItem.java @@ -0,0 +1,24 @@ +package io.quarkus.deployment.builditem.nativeimage; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * A package that will be initialized at runtime in native mode. + *
<p>
+ * WARNING: this build item should not be used in Quarkus itself and is only provided + * to simplify the early stages of external extensions development. + *
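A brief sketch contrasting the coarse package-level item with the preferred class-level one; the package and class names are invented.

```java
// Illustrative sketch only; names are hypothetical.
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem;
import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedPackageBuildItem;

public class MyNativeInitProcessor {

    @BuildStep
    RuntimeInitializedPackageBuildItem wholePackage() {
        // quick stop-gap while prototyping an extension against a library that breaks native image builds
        return new RuntimeInitializedPackageBuildItem("com.example.somelib.internal");
    }

    @BuildStep
    RuntimeInitializedClassBuildItem singleClass() {
        // the surgical form preferred once the offending class is known
        return new RuntimeInitializedClassBuildItem("com.example.somelib.internal.SecureRandomHolder");
    }
}
```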
<p>
+ * For Quarkus development, please take the time to surgically mark individual classes as runtime initialized. + */ +public final class RuntimeInitializedPackageBuildItem extends MultiBuildItem { + + private final String packageName; + + public RuntimeInitializedPackageBuildItem(String packageName) { + this.packageName = packageName; + } + + public String getPackageName() { + return packageName; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ServiceProviderBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ServiceProviderBuildItem.java index ae16002b04d96..47737daf0c42c 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ServiceProviderBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ServiceProviderBuildItem.java @@ -6,18 +6,20 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; import java.util.Set; import io.quarkus.builder.item.MultiBuildItem; +import io.quarkus.deployment.util.ServiceUtil; /** * Represents a Service Provider registration. - * When processed, it embeds the service interface descriptor (META-INF/services/...) and allow reflection (instantiation only) - * on a set of provider - * classes. + * When processed, it embeds the service interface descriptor (META-INF/services/...) in the native image + * and registers the classes returned by {@link #providers()} for reflection (instantiation only). */ public final class ServiceProviderBuildItem extends MultiBuildItem { @@ -57,7 +59,36 @@ public static ServiceProviderBuildItem allProviders(final String serviceInterfac classNames.add(line); } } - return new ServiceProviderBuildItem(serviceInterfaceClassName, new ArrayList<>(classNames)); + return new ServiceProviderBuildItem(serviceInterfaceClassName, + Collections.unmodifiableList(new ArrayList<>(classNames)), false); + } + + /** + * Creates and returns a new {@link ServiceProviderBuildItem} for the given {@code serviceInterfaceClassName} by + * including all the providers that are listed in service interface descriptor files + * {@code "META-INF/services/" + serviceInterfaceClassName} findable in the Context Class Loader of the current + * thread. 
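A minimal sketch of the new factory method in use; the SPI interface name and processor are hypothetical.

```java
// Illustrative sketch only; names are hypothetical.
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem;

public class MyServiceLoaderProcessor {

    @BuildStep
    void registerProviders(BuildProducer<ServiceProviderBuildItem> services) {
        // embeds META-INF/services/com.example.spi.Codec and registers each listed
        // implementation for instantiation-only reflection in the native image
        services.produce(
                ServiceProviderBuildItem.allProvidersFromClassPath("com.example.spi.Codec"));
    }
}
```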
+ * + * @param serviceInterfaceClassName the interface whose service interface descriptor file we want to embed + * @return a new {@link ServiceProviderBuildItem} + * @throws RuntimeException wrapping any {@link IOException}s thrown when accessing class path resources + */ + public static ServiceProviderBuildItem allProvidersFromClassPath(final String serviceInterfaceClassName) { + if (serviceInterfaceClassName == null || serviceInterfaceClassName.trim().isEmpty()) { + throw new IllegalArgumentException("service interface name cannot be null or blank"); + } + final String resourcePath = SPI_ROOT + serviceInterfaceClassName; + try { + Set implementations = ServiceUtil.classNamesNamedIn( + Thread.currentThread().getContextClassLoader(), + resourcePath); + return new ServiceProviderBuildItem( + serviceInterfaceClassName, + Collections.unmodifiableList(new ArrayList<>(implementations)), + false); + } catch (IOException e) { + throw new RuntimeException("Could not read class path resources having path '" + resourcePath + "'", e); + } } /** @@ -69,7 +100,19 @@ public static ServiceProviderBuildItem allProviders(final String serviceInterfac * @param providerClassNames the list of provider class names that must already be mentioned in the file */ public ServiceProviderBuildItem(String serviceInterfaceClassName, String... providerClassNames) { - this(serviceInterfaceClassName, Arrays.asList(providerClassNames)); + this(serviceInterfaceClassName, Collections.unmodifiableList(Arrays.asList(providerClassNames)), false); + } + + /** + * Registers the specified service interface descriptor to be embedded and allow reflection (instantiation only) + * of the specified provider classes. Note that the service interface descriptor file has to exist and match the + * list of specified provider class names. + * + * @param serviceInterfaceClassName the interface whose service interface descriptor file we want to embed + * @param providers a collection of provider class names that must already be mentioned in the file + */ + public ServiceProviderBuildItem(String serviceInterfaceClassName, Collection providers) { + this(serviceInterfaceClassName, Collections.unmodifiableList(new ArrayList<>(providers)), false); } /** @@ -81,6 +124,18 @@ public ServiceProviderBuildItem(String serviceInterfaceClassName, String... 
prov * @param providers the list of provider class names that must already be mentioned in the file */ public ServiceProviderBuildItem(String serviceInterfaceClassName, List providers) { + this(serviceInterfaceClassName, Collections.unmodifiableList(new ArrayList<>(providers)), false); + } + + /** + * An internal overload that must be called with an immutable {@link List} of {@code providers} + * + * @param serviceInterfaceClassName the interface whose service interface descriptor file we want to embed + * @param providers the list of provider class names that must already be mentioned in the file + * @param marker just a way to differentiate this constructor from {@link #ServiceProviderBuildItem(String, List)}; + * the value is ignored + */ + private ServiceProviderBuildItem(String serviceInterfaceClassName, List providers, boolean marker) { this.serviceInterface = Objects.requireNonNull(serviceInterfaceClassName, "The service interface must not be `null`"); this.providers = providers; @@ -96,10 +151,16 @@ public ServiceProviderBuildItem(String serviceInterfaceClassName, List p }); } + /** + * @return a immutable {@link List} of provider class names + */ public List providers() { return providers; } + /** + * @return the resource path for the service descriptor file + */ public String serviceDescriptorFile() { return SPI_ROOT + serviceInterface; } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/codegen/CodeGenData.java b/core/deployment/src/main/java/io/quarkus/deployment/codegen/CodeGenData.java index 1da291b8d2454..32f19b984e52d 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/codegen/CodeGenData.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/codegen/CodeGenData.java @@ -9,11 +9,21 @@ public class CodeGenData { public final Path outPath; public final Path sourceDir; public final Path buildDir; + public boolean redirectIO; public CodeGenData(CodeGenProvider provider, Path outPath, Path sourceDir, Path buildDir) { + this(provider, outPath, sourceDir, buildDir, true); + } + + public CodeGenData(CodeGenProvider provider, Path outPath, Path sourceDir, Path buildDir, boolean redirectIO) { this.provider = provider; this.outPath = outPath; this.sourceDir = sourceDir; - this.buildDir = buildDir; + this.buildDir = buildDir.normalize(); + this.redirectIO = redirectIO; + } + + public void setRedirectIO(boolean redirectIO) { + this.redirectIO = redirectIO; } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java index 6f86ea9b02e55..48572a88c31c7 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java @@ -24,6 +24,7 @@ import java.util.TreeMap; import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.Converter; import org.jboss.logging.Logger; import org.wildfly.common.Assert; @@ -133,6 +134,11 @@ public BuildTimeConfigurationReader(final List> configRoots) { private static void processClass(ClassDefinition.Builder builder, Class clazz, final Map, GroupDefinition> groups) { builder.setConfigurationClass(clazz); + processClassFields(builder, clazz, groups); + } + + private static void processClassFields(final 
ClassDefinition.Builder builder, final Class clazz, + final Map, GroupDefinition> groups) { for (Field field : clazz.getDeclaredFields()) { int mods = field.getModifiers(); if (Modifier.isStatic(mods)) { @@ -149,6 +155,10 @@ private static void processClass(ClassDefinition.Builder builder, Class clazz } builder.addMember(processValue(field, field.getGenericType(), groups)); } + Class superclass = clazz.getSuperclass(); + if (superclass != null) { + processClassFields(builder, superclass, groups); + } } private static ClassDefinition.ClassMember.Specification processValue(Field field, Type valueType, @@ -286,7 +296,11 @@ ReadResult run() { nameBuilder.setLength(len); } // sweep-up - for (String propertyName : config.getPropertyNames()) { + for (String propertyName : getAllProperties()) { + if (propertyName.equals(ConfigSource.CONFIG_ORDINAL)) { + continue; + } + NameIterator ni = new NameIterator(propertyName); if (ni.hasNext() && ni.nextSegmentEquals("quarkus")) { ni.next(); @@ -704,6 +718,19 @@ private Converter getConverter(SmallRyeConfig config, Field field, ConverterT convByType.put(valueType, converter); return converter; } + + /** + * We collect all properties from ConfigSources, because Config#getPropertyNames exclude the active profiled + * properties, meaning that the property is written in the default config source unprofiled. This may cause + * issues if we run with a different profile and fallback to defaults. + */ + private Set getAllProperties() { + Set properties = new HashSet<>(); + for (ConfigSource configSource : config.getConfigSources()) { + properties.addAll(configSource.getPropertyNames()); + } + return properties; + } } public static final class ReadResult { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java new file mode 100644 index 0000000000000..e2fc6200f68d1 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java @@ -0,0 +1,48 @@ +package io.quarkus.deployment.configuration; + +import java.util.Optional; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +/** + * WARNING: This is not normal quarkus config, this is only read from application.properties. + *
<p>
+ * This is because it is needed before any of the config infrastructure is set up. + */ +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public class ClassLoadingConfig { + + /** + * Artifacts that are loaded in a parent-first manner. This can be used to work around issues where a given + * class needs to be loaded by the system ClassLoader. Note that if you + * make a library parent first, all its dependencies should generally also be parent first.
+ * <p>
+ * Artifacts should be configured as a comma-separated list of artifact ids, with the group, artifact-id and optional + * classifier separated by a colon.
+ * <p>
+ * WARNING: This config property can only be set in application.properties + */ + @ConfigItem(defaultValue = "") + public Optional parentFirstArtifacts; + + /** + * Artifacts that are loaded in the runtime ClassLoader in dev mode, so they will be dropped + * and recreated on change. + *
<p>
+ * This is an advanced option; it should only be used if you have a problem with + libraries holding stale state between reloads. Note that if you use this, any library that depends on the listed libraries + will also need to be reloadable.
+ * <p>
+ * This setting has no impact on production builds. + *
<p>
+ * Artifacts should be configured as a comma-separated list of artifact ids, with the group, artifact-id and optional + * classifier separated by a colon.
+ * <p>
+ * WARNING: This config property can only be set in application.properties + */ + @ConfigItem(defaultValue = "") + public Optional reloadableArtifacts; + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java index 078137859c1d2..9136c61a379c7 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java @@ -453,11 +453,19 @@ public void run() { final ConfigPatternMap runTimeIgnored = ConfigPatternMap .merge(ConfigPatternMap.merge(buildTimePatternMap, buildTimeRunTimePatternMap, combinator), bootstrapPatternMap, combinator); + final ConfigPatternMap bootstrapIgnored = ConfigPatternMap + .merge(ConfigPatternMap.merge(buildTimePatternMap, + buildTimeRunTimePatternMap, combinator), runTimePatternMap, combinator); final MethodDescriptor siParserBody = generateParserBody(buildTimeRunTimePatternMap, buildTimeRunTimeIgnored, - new StringBuilder("siParseKey"), false, false); + new StringBuilder("siParseKey"), false, Type.BUILD_TIME); final MethodDescriptor rtParserBody = generateParserBody(runTimePatternMap, runTimeIgnored, - new StringBuilder("rtParseKey"), false, true); + new StringBuilder("rtParseKey"), false, Type.RUNTIME); + MethodDescriptor bsParserBody = null; + if (bootstrapConfigSetupNeeded()) { + bsParserBody = generateParserBody(bootstrapPatternMap, bootstrapIgnored, + new StringBuilder("bsParseKey"), false, Type.BOOTSTRAP); + } // create the bootstrap config if necessary ResultHandle bootstrapBuilder = null; @@ -716,6 +724,11 @@ public void run() { // generate sweep for run time configSweepLoop(rtParserBody, readConfig, runTimeConfig); + if (bootstrapConfigSetupNeeded()) { + // generate sweep for bootstrap config + configSweepLoop(bsParserBody, readBootstrapConfig, bootstrapConfig); + } + // generate ensure-initialized method // the point of this method is simply to initialize the Config class // thus initializing the config infrastructure before anything requests it @@ -743,7 +756,7 @@ public void run() { // throw the proper exception final ResultHandle finalErrorMessageBuilder = isError.newInstance(SB_NEW); isError.invokeVirtualMethod(SB_APPEND_STRING, finalErrorMessageBuilder, isError - .load("One or more configuration errors has prevented the application from starting. The errors are:\n")); + .load("One or more configuration errors have prevented the application from starting. The errors are:\n")); isError.invokeVirtualMethod(SB_APPEND_STRING, finalErrorMessageBuilder, niceErrorMessage); final ResultHandle finalErrorMessage = isError.invokeVirtualMethod(OBJ_TO_STRING, finalErrorMessageBuilder); final ResultHandle configurationException = isError @@ -1073,7 +1086,7 @@ private void generateEmptyParsers(ClassCreator cc) { private MethodDescriptor generateParserBody(final ConfigPatternMap keyMap, final ConfigPatternMap ignoredMap, final StringBuilder methodName, final boolean dynamic, - final boolean isRunTime) { + final Type type) { final Container matched = keyMap == null ? null : keyMap.getMatched(); final Object ignoreMatched = ignoredMap == null ? null : ignoredMap.getMatched(); @@ -1096,7 +1109,7 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke } } if (!needsCode) { - return isRunTime ? 
RT_EMPTY_PARSER : EMPTY_PARSER; + return (type == Type.BUILD_TIME) ? EMPTY_PARSER : RT_EMPTY_PARSER; } } } @@ -1123,7 +1136,7 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke matchedBody.invokeVirtualMethod(NI_PREVIOUS, keyIter); } // we have to get or create all containing (and contained) groups of this member - matchedBody.invokeStaticMethod(generateGetEnclosing(fieldContainer, isRunTime), keyIter, + matchedBody.invokeStaticMethod(generateGetEnclosing(fieldContainer, type), keyIter, config); } // else ignore (already populated eagerly) @@ -1134,7 +1147,7 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke final ResultHandle lastSeg = matchedBody.invokeVirtualMethod(NI_GET_PREVIOUS_SEGMENT, keyIter); matchedBody.invokeVirtualMethod(NI_PREVIOUS, keyIter); final ResultHandle mapHandle = matchedBody - .invokeStaticMethod(generateGetEnclosing(mapContainer, isRunTime), keyIter, config); + .invokeStaticMethod(generateGetEnclosing(mapContainer, type), keyIter, config); // populate the map final Field field = mapContainer.findField(); final FieldDescriptor fd = getOrCreateConverterInstance(field); @@ -1145,7 +1158,7 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke } } else if (ignoreMatched == null) { // name is unknown - matchedBody.invokeStaticMethod(isRunTime ? CD_UNKNOWN_RT : CD_UNKNOWN, keyIter); + matchedBody.invokeStaticMethod(type == Type.BUILD_TIME ? CD_UNKNOWN : CD_UNKNOWN_RT, keyIter); } // return; matchedBody.returnValue(null); @@ -1172,7 +1185,7 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke nameMatched.invokeStaticMethod( generateParserBody(keyMap.getChild(name), ignoredMap == null ? null : ignoredMap.getChild(name), methodName, dynamic, - isRunTime), + type), config, keyIter); methodName.setLength(length); // return; @@ -1205,7 +1218,7 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke final int length = methodName.length(); methodName.append(':').append(name); nameMatched.invokeStaticMethod( - generateParserBody(null, ignoredMap.getChild(name), methodName, false, isRunTime), + generateParserBody(null, ignoredMap.getChild(name), methodName, false, type), config, keyIter); methodName.setLength(length); // return; @@ -1229,27 +1242,27 @@ private MethodDescriptor generateParserBody(final ConfigPatternMap ke generateParserBody(keyMap == null ? null : keyMap.getChild(ConfigPatternMap.WILD_CARD), ignoredMap == null ? null : ignoredMap.getChild(ConfigPatternMap.WILD_CARD), methodName, - true, isRunTime), + true, type), config, keyIter); methodName.setLength(length); // return; matchedBody.returnValue(null); } } - body.invokeStaticMethod(isRunTime ? CD_UNKNOWN_RT : CD_UNKNOWN, keyIter); + body.invokeStaticMethod(type == Type.BUILD_TIME ? CD_UNKNOWN : CD_UNKNOWN_RT, keyIter); body.returnValue(null); return body.getMethodDescriptor(); } } - private MethodDescriptor generateGetEnclosing(final FieldContainer matchNode, final boolean isRunTime) { + private MethodDescriptor generateGetEnclosing(final FieldContainer matchNode, final Type type) { // name iterator cursor is placed BEFORE the field name on entry MethodDescriptor md = enclosingMemberMethods.get(matchNode); if (md != null) { return md; } md = MethodDescriptor.ofMethod(CONFIG_CLASS_NAME, - (isRunTime ? 
"rt" : "si") + "GetEnclosing:" + matchNode.getCombinedName(), Object.class, + type.methodPrefix + "GetEnclosing:" + matchNode.getCombinedName(), Object.class, NameIterator.class, SmallRyeConfig.class); try (MethodCreator mc = cc.getMethodCreator(md)) { mc.setModifiers(Opcodes.ACC_STATIC); @@ -1271,7 +1284,7 @@ private MethodDescriptor generateGetEnclosing(final FieldContainer matchNode, fi // consume segment mc.invokeVirtualMethod(NI_PREVIOUS, keyIter); } - final ResultHandle enclosing = mc.invokeStaticMethod(generateGetEnclosing(fieldContainer, isRunTime), + final ResultHandle enclosing = mc.invokeStaticMethod(generateGetEnclosing(fieldContainer, type), keyIter, config); final ResultHandle fieldVal; @@ -1330,7 +1343,7 @@ private MethodDescriptor generateGetEnclosing(final FieldContainer matchNode, fi final ResultHandle key = mc.invokeVirtualMethod(NI_GET_PREVIOUS_SEGMENT, keyIter); // consume segment mc.invokeVirtualMethod(NI_PREVIOUS, keyIter); - final ResultHandle map = mc.invokeStaticMethod(generateGetEnclosing(mapContainer, isRunTime), keyIter, + final ResultHandle map = mc.invokeStaticMethod(generateGetEnclosing(mapContainer, type), keyIter, config); // restore mc.invokeVirtualMethod(NI_NEXT, keyIter); @@ -1359,14 +1372,14 @@ private MethodDescriptor generateGetEnclosing(final FieldContainer matchNode, fi return md; } - private MethodDescriptor generateGetEnclosing(final MapContainer matchNode, final boolean isRunTime) { + private MethodDescriptor generateGetEnclosing(final MapContainer matchNode, final Type type) { // name iterator cursor is placed BEFORE the map key on entry MethodDescriptor md = enclosingMemberMethods.get(matchNode); if (md != null) { return md; } md = MethodDescriptor.ofMethod(CONFIG_CLASS_NAME, - (isRunTime ? "rt" : "si") + "GetEnclosing:" + matchNode.getCombinedName(), Object.class, + type.methodPrefix + "GetEnclosing:" + matchNode.getCombinedName(), Object.class, NameIterator.class, SmallRyeConfig.class); try (MethodCreator mc = cc.getMethodCreator(md)) { mc.setModifiers(Opcodes.ACC_STATIC); @@ -1380,7 +1393,7 @@ private MethodDescriptor generateGetEnclosing(final MapContainer matchNode, fina // consume segment mc.invokeVirtualMethod(NI_PREVIOUS, keyIter); } - final ResultHandle enclosing = mc.invokeStaticMethod(generateGetEnclosing(fieldContainer, isRunTime), + final ResultHandle enclosing = mc.invokeStaticMethod(generateGetEnclosing(fieldContainer, type), keyIter, config); if (!fieldContainer.getClassMember().getPropertyName().isEmpty()) { // restore @@ -1403,7 +1416,7 @@ private MethodDescriptor generateGetEnclosing(final MapContainer matchNode, fina final ResultHandle key = mc.invokeVirtualMethod(NI_GET_PREVIOUS_SEGMENT, keyIter); // consume enclosing map key mc.invokeVirtualMethod(NI_PREVIOUS, keyIter); - final ResultHandle map = mc.invokeStaticMethod(generateGetEnclosing(mapContainer, isRunTime), keyIter, + final ResultHandle map = mc.invokeStaticMethod(generateGetEnclosing(mapContainer, type), keyIter, config); // restore mc.invokeVirtualMethod(NI_NEXT, keyIter); @@ -1630,4 +1643,16 @@ private static boolean isFieldEligibleForDirectAccess(ClassDefinition.ClassMembe && Modifier.isPublic(classMember.getEnclosingDefinition().getConfigurationClass().getModifiers()) && Modifier.isPublic(classMember.getField().getType().getModifiers()); } + + private enum Type { + BUILD_TIME("si"), + BOOTSTRAP("bs"), + RUNTIME("rt"); + + final String methodPrefix; + + Type(String methodPrefix) { + this.methodPrefix = methodPrefix; + } + } } diff --git 
a/core/deployment/src/main/java/io/quarkus/deployment/configuration/TestConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/TestConfig.java new file mode 100644 index 0000000000000..de0fee867fb46 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/TestConfig.java @@ -0,0 +1,21 @@ +package io.quarkus.deployment.configuration; + +import java.time.Duration; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot +public class TestConfig { + + /** + * Configures the hang detection in @QuarkusTest. If no activity happens (i.e. no test callbacks are called) over + * this period then QuarkusTest will dump all threads stack traces, to help diagnose a potential hang. + * + * Note that the initial timeout (before Quarkus has started) will only apply if provided by a system property, as + * it is not possible to read all config sources until Quarkus has booted. + */ + @ConfigItem(defaultValue = "10m") + Duration hangDetectionTimeout; + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java index 3049c764e71a4..33d1713e78f6a 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java @@ -90,7 +90,7 @@ static abstract class LeafMember extends ClassMember { this.field = Assert.checkNotNullParam("field", field); final Class declaringClass = field.getDeclaringClass(); final Class configurationClass = classDefinition.configurationClass; - if (declaringClass != configurationClass) { + if (!declaringClass.isAssignableFrom(configurationClass)) { throw new IllegalArgumentException( "Member declaring " + declaringClass + " does not match configuration " + configurationClass); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java index 8d39d9c48558b..f8b8f9956488f 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java @@ -73,7 +73,7 @@ public final class RootDefinition extends ClassDefinition { rootName = String.join("-", (Iterable) () -> lowerCase(trimmedSegments.iterator())); } this.rootName = rootName; - this.descriptor = FieldDescriptor.of(CONFIG_CLASS_NAME, String.join("", segments), Object.class); + this.descriptor = FieldDescriptor.of(CONFIG_CLASS_NAME, String.join("", segments), configClass); } public ConfigPhase getConfigPhase() { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/AlwaysFalsePredicate.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/AlwaysFalsePredicate.java new file mode 100644 index 0000000000000..387b48a7764c2 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/AlwaysFalsePredicate.java @@ -0,0 +1,11 @@ +package io.quarkus.deployment.dev; + +import java.util.function.Predicate; + +public class AlwaysFalsePredicate implements Predicate { + + @Override + public boolean test(Object o) { + return false; + } +} diff --git 
a/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassComparisonUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassComparisonUtil.java new file mode 100644 index 0000000000000..afb543f8dc541 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassComparisonUtil.java @@ -0,0 +1,172 @@ +package io.quarkus.deployment.dev; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.AnnotationValue; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; +import org.jboss.jandex.FieldInfo; +import org.jboss.jandex.MethodInfo; +import org.jboss.jandex.Type; + +public class ClassComparisonUtil { + static boolean isSameStructure(ClassInfo clazz, ClassInfo old) { + if (clazz.flags() != old.flags()) { + return false; + } + if (!clazz.typeParameters().equals(old.typeParameters())) { + return false; + } + if (!clazz.interfaceNames().equals(old.interfaceNames())) { + return false; + } + if (!compareAnnotations(clazz.classAnnotations(), old.classAnnotations())) { + return false; + } + if (old.fields().size() != clazz.fields().size()) { + return false; + } + Map oldFields = old.fields().stream() + .collect(Collectors.toMap(FieldInfo::name, Function.identity())); + for (FieldInfo field : clazz.fields()) { + FieldInfo of = oldFields.get(field.name()); + if (of == null) { + return false; + } + if (of.flags() != field.flags()) { + return false; + } + if (!of.type().equals(field.type())) { + return false; + } + if (!compareAnnotations(of.annotations(), field.annotations())) { + return false; + } + } + for (MethodInfo method : clazz.methods()) { + MethodInfo om = null; + for (MethodInfo i : old.methods()) { + if (!i.name().equals(method.name())) { + continue; + } + if (!i.returnType().equals(method.returnType())) { + continue; + } + if (i.parameters().size() != method.parameters().size()) { + continue; + } + if (i.flags() != method.flags()) { + continue; + } + if (!Objects.equals(i.defaultValue(), method.defaultValue())) { + continue; + } + boolean paramEqual = true; + for (int j = 0; j < method.parameters().size(); ++j) { + Type a = method.parameters().get(j); + Type b = i.parameters().get(j); + if (!a.equals(b)) { + paramEqual = false; + break; + } + } + if (!paramEqual) { + continue; + } + if (!compareMethodAnnotations(i.annotations(), method.annotations())) { + continue; + } + om = i; + } + //no further checks needed, we fully matched in the loop + if (om == null) { + return false; + } + } + return true; + } + + static boolean compareAnnotations(Collection a, Collection b) { + if (a.size() != b.size()) { + return false; + } + Map lookup = b.stream() + .collect(Collectors.toMap(AnnotationInstance::name, Function.identity())); + + for (AnnotationInstance i1 : a) { + AnnotationInstance i2 = lookup.get(i1.name()); + if (i2 == null) { + return false; + } + if (!compareAnnotation(i1, i2)) { + return false; + } + } + return true; + } + + static boolean compareMethodAnnotations(Collection a, Collection b) { + if (a.size() != b.size()) { + return false; + } + List method1 = new ArrayList<>(); + Map> params1 = new HashMap<>(); + methodMap(a, method1, params1); + List method2 = new ArrayList<>(); + Map> params2 = new HashMap<>(); + methodMap(b, method2, params2); 
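
The comparison utility introduced here is what decides whether an edit only touched method bodies (a candidate for an instrumentation-based reload) or changed the class shape (which forces a full dev-mode restart). A minimal sketch of the idea, under stated assumptions: the caller is placed in the same io.quarkus.deployment.dev package because isSameStructure is package-private, the Indexer usage is standard Jandex, and the class and method names below are illustrative rather than part of this PR.

```java
package io.quarkus.deployment.dev; // assumption: same package, since isSameStructure is package-private

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.Indexer;

class StructureCheckSketch {

    // Returns true when only method bodies differ between the two versions of a class,
    // i.e. the change could be applied via instrumentation instead of a full restart.
    static boolean onlyBodiesChanged(Path oldClassFile, Path newClassFile) throws Exception {
        return ClassComparisonUtil.isSameStructure(index(newClassFile), index(oldClassFile));
    }

    private static ClassInfo index(Path classFile) throws Exception {
        try (InputStream in = Files.newInputStream(classFile)) {
            // Indexer.index returns the ClassInfo of the class it just read
            return new Indexer().index(in);
        }
    }
}
```
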
+ if (!compareAnnotations(method1, method2)) { + return false; + } + if (!params1.keySet().equals(params2.keySet())) { + return false; + } + for (Map.Entry> entry : params1.entrySet()) { + List other = params2.get(entry.getKey()); + if (!compareAnnotations(other, entry.getValue())) { + return false; + } + } + return true; + } + + private static void methodMap(Collection b, List method2, + Map> params2) { + for (AnnotationInstance i : b) { + if (i.target().kind() == AnnotationTarget.Kind.METHOD) { + method2.add(i); + } else { + int index = i.target().asMethodParameter().position(); + List instances = params2.get(index); + if (instances == null) { + params2.put(index, instances = new ArrayList<>()); + } + instances.add(i); + } + } + } + + private static boolean compareAnnotation(AnnotationInstance a, AnnotationInstance b) { + List valuesA = a.values(); + List valuesB = b.values(); + if (valuesA.size() != valuesB.size()) { + return false; + } + for (AnnotationValue valueA : valuesA) { + AnnotationValue valueB = b.value(valueA.name()); + if (!valueA.equals(valueB)) { + return false; + } + } + return true; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassLoaderCompiler.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassLoaderCompiler.java index 7195bf7c3d8fb..6d0467009f4ea 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassLoaderCompiler.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassLoaderCompiler.java @@ -146,7 +146,7 @@ public ClassLoaderCompiler(ClassLoader classLoader, new CompilationProvider.Context( i.getName(), classPathElements, - new File(i.getProjectDirectory()), + i.getProjectDirectory() == null ? null : new File(i.getProjectDirectory()), new File(sourcePath), new File(i.getClassesPath()), context.getSourceEncoding(), diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassScanResult.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassScanResult.java new file mode 100644 index 0000000000000..efd46c589c567 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/ClassScanResult.java @@ -0,0 +1,39 @@ +package io.quarkus.deployment.dev; + +import java.nio.file.Path; +import java.util.HashSet; +import java.util.Set; + +public class ClassScanResult { + final Set changedClasses = new HashSet<>(); + final Set deletedClasses = new HashSet<>(); + final Set addedClasses = new HashSet<>(); + final Set changedClassNames = new HashSet<>(); + final Set deletedClassNames = new HashSet<>(); + final Set addedClassNames = new HashSet<>(); + + public boolean isChanged() { + return !changedClasses.isEmpty() || !deletedClasses.isEmpty() || !addedClasses.isEmpty(); + } + + public void addDeletedClass(Path moduleClassesPath, Path classFilePath) { + deletedClasses.add(classFilePath); + deletedClassNames.add(toName(moduleClassesPath, classFilePath)); + } + + public void addChangedClass(Path moduleClassesPath, Path classFilePath) { + changedClasses.add(classFilePath); + changedClassNames.add(toName(moduleClassesPath, classFilePath)); + } + + public void addAddedClass(Path moduleClassesPath, Path classFilePath) { + addedClasses.add(classFilePath); + addedClassNames.add(toName(moduleClassesPath, classFilePath)); + } + + private String toName(Path moduleClassesPath, Path classFilePath) { + String cf = moduleClassesPath.relativize(classFilePath).toString() + .replace(moduleClassesPath.getFileSystem().getSeparator(), "."); + return cf.substring(0, cf.length() - 
".class".length()); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/ConfigureDisableInstrumentationBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/ConfigureDisableInstrumentationBuildStep.java new file mode 100644 index 0000000000000..1c39b6806437a --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/ConfigureDisableInstrumentationBuildStep.java @@ -0,0 +1,39 @@ +package io.quarkus.deployment.dev; + +import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.ServiceStartBuildItem; + +public class ConfigureDisableInstrumentationBuildStep { + + @BuildStep + ServiceStartBuildItem configure(List forIndexItems, + List forClassItems) { + if (forClassItems.isEmpty() && forIndexItems.isEmpty()) { + return null; + } + + RuntimeUpdatesProcessor processor = RuntimeUpdatesProcessor.INSTANCE; + if (processor != null) { + processor.setDisableInstrumentationForIndexPredicate(determineEffectivePredicate(forIndexItems)) + .setDisableInstrumentationForClassPredicate(determineEffectivePredicate(forClassItems)); + } + return null; + } + + private Predicate determineEffectivePredicate(List>> suppliers) { + if (suppliers.isEmpty()) { + return new AlwaysFalsePredicate<>(); + } else { + if (suppliers.size() == 1) { + return suppliers.get(0).get(); + } else { + return suppliers.stream().map(Supplier::get) + .reduce((c) -> false, Predicate::or); + } + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForClassPredicateBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForClassPredicateBuildItem.java new file mode 100644 index 0000000000000..af4ba383cdfc5 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForClassPredicateBuildItem.java @@ -0,0 +1,30 @@ +package io.quarkus.deployment.dev; + +import java.util.function.Predicate; +import java.util.function.Supplier; + +import org.jboss.jandex.ClassInfo; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * Allows disabling of instrumentation based reload if the changed class matches certain criteria + */ +public final class DisableInstrumentationForClassPredicateBuildItem extends MultiBuildItem + implements Supplier> { + + private final Predicate predicate; + + public DisableInstrumentationForClassPredicateBuildItem(Predicate predicate) { + this.predicate = predicate; + } + + public Predicate getPredicate() { + return predicate; + } + + @Override + public Predicate get() { + return getPredicate(); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForIndexPredicateBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForIndexPredicateBuildItem.java new file mode 100644 index 0000000000000..c73132516e83c --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForIndexPredicateBuildItem.java @@ -0,0 +1,30 @@ +package io.quarkus.deployment.dev; + +import java.util.function.Predicate; +import java.util.function.Supplier; + +import org.jboss.jandex.Index; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * Allows disabling of instrumentation based reload if the index of changed classes matches certain criteria + */ +public final class DisableInstrumentationForIndexPredicateBuildItem 
extends MultiBuildItem + implements Supplier> { + + private final Predicate predicate; + + public DisableInstrumentationForIndexPredicateBuildItem(Predicate predicate) { + this.predicate = predicate; + } + + public Predicate getPredicate() { + return predicate; + } + + @Override + public Predicate get() { + return getPredicate(); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/HotDeploymentConfigFileBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/HotDeploymentConfigFileBuildStep.java index e227702fecd71..203333a07f9b3 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/HotDeploymentConfigFileBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/HotDeploymentConfigFileBuildStep.java @@ -13,11 +13,12 @@ public class HotDeploymentConfigFileBuildStep { @BuildStep ServiceStartBuildItem setupConfigFileHotDeployment(List files) { // TODO: this should really be an output of the RuntimeRunner - RuntimeUpdatesProcessor processor = IsolatedDevModeMain.runtimeUpdatesProcessor; + RuntimeUpdatesProcessor processor = RuntimeUpdatesProcessor.INSTANCE; if (processor != null) { Map watchedFilePaths = files.stream() .collect(Collectors.toMap(HotDeploymentWatchedFileBuildItem::getLocation, - HotDeploymentWatchedFileBuildItem::isRestartNeeded)); + HotDeploymentWatchedFileBuildItem::isRestartNeeded, + (isRestartNeeded1, isRestartNeeded2) -> isRestartNeeded1 || isRestartNeeded2)); processor.setWatchedFilePaths(watchedFilePaths); } return null; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/IDEDevModeMain.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/IDEDevModeMain.java index 154e76d3cf1bb..6281bee827090 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/IDEDevModeMain.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/IDEDevModeMain.java @@ -1,5 +1,6 @@ package io.quarkus.deployment.dev; +import java.io.Closeable; import java.io.File; import java.nio.file.Path; import java.util.Collections; @@ -14,17 +15,19 @@ import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.model.AppArtifactKey; import io.quarkus.bootstrap.resolver.AppModelResolverException; -import io.quarkus.bootstrap.resolver.QuarkusGradleModelFactory; import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; +import io.quarkus.bootstrap.resolver.maven.workspace.LocalWorkspace; import io.quarkus.bootstrap.resolver.model.QuarkusModel; import io.quarkus.bootstrap.resolver.model.WorkspaceModule; import io.quarkus.bootstrap.util.QuarkusModelHelper; import io.quarkus.bootstrap.utils.BuildToolHelper; -@SuppressWarnings("unused") -public class IDEDevModeMain implements BiConsumer> { +public class IDEDevModeMain implements BiConsumer>, Closeable { private static final Logger log = Logger.getLogger(IDEDevModeMain.class.getName()); + private static final String APP_PROJECT = "app-project"; + + private IsolatedDevModeMain delegate; @Override public void accept(CuratedApplication curatedApplication, Map stringObjectMap) { @@ -33,38 +36,61 @@ public void accept(CuratedApplication curatedApplication, Map st devModeContext.setArgs((String[]) stringObjectMap.get("args")); try { if (BuildToolHelper.isMavenProject(appClasses)) { - LocalProject project = LocalProject.loadWorkspace(appClasses); + LocalProject project = (LocalProject) stringObjectMap.get(APP_PROJECT); + if (project == null) { + project = LocalProject.loadWorkspace(appClasses); + } + DevModeContext.ModuleInfo 
root = toModule(project); devModeContext.setApplicationRoot(root); - for (Map.Entry module : project.getWorkspace().getProjects().entrySet()) { - if (module.getKey().equals(project.getKey())) { + + final LocalWorkspace workspace = project.getWorkspace(); + for (AppArtifactKey localKey : curatedApplication.getAppModel().getLocalProjectArtifacts()) { + final LocalProject depProject = workspace.getProject(localKey.getGroupId(), localKey.getArtifactId()); + if (project == depProject) { continue; } - devModeContext.getAdditionalModules().add(toModule(module.getValue())); + if (depProject == null) { + throw new IllegalStateException( + "Failed to locate project dependency " + localKey + " in the workspace"); + } + devModeContext.getAdditionalModules().add(toModule(depProject)); + devModeContext.getLocalArtifacts().add(localKey); } } else { - // TODO find a way to reuse the previously model instead of building a new one. - QuarkusModel quarkusModel = QuarkusGradleModelFactory.createForTasks( - BuildToolHelper.getBuildFile(appClasses, BuildToolHelper.BuildTool.GRADLE).toFile(), - QuarkusModelHelper.DEVMODE_REQUIRED_TASKS); - final WorkspaceModule launchingModule = quarkusModel.getWorkspace().getMainModule(); - DevModeContext.ModuleInfo root = toModule(quarkusModel.getWorkspace().getMainModule()); + final QuarkusModel model = QuarkusModelHelper + .deserializeQuarkusModel((Path) stringObjectMap.get(QuarkusModelHelper.SERIALIZED_QUARKUS_MODEL)); + final WorkspaceModule launchingModule = model.getWorkspace().getMainModule(); + DevModeContext.ModuleInfo root = toModule(launchingModule); devModeContext.setApplicationRoot(root); - for (WorkspaceModule additionalModule : quarkusModel.getWorkspace().getAllModules()) { + for (WorkspaceModule additionalModule : model.getWorkspace().getAllModules()) { if (!additionalModule.getArtifactCoords().equals(launchingModule.getArtifactCoords())) { devModeContext.getAdditionalModules().add(toModule(additionalModule)); } } - } + } } catch (AppModelResolverException e) { log.error("Failed to load workspace, hot reload will not be available", e); } - new IsolatedDevModeMain().accept(curatedApplication, + terminateIfRunning(); + delegate = new IsolatedDevModeMain(); + delegate.accept(curatedApplication, Collections.singletonMap(DevModeContext.class.getName(), devModeContext)); } + @Override + public void close() { + terminateIfRunning(); + } + + private void terminateIfRunning() { + if (delegate != null) { + delegate.close(); + } + } + private DevModeContext.ModuleInfo toModule(WorkspaceModule module) throws BootstrapGradleException { AppArtifactKey key = new AppArtifactKey(module.getArtifactCoords().getGroupId(), module.getArtifactCoords().getArtifactId(), module.getArtifactCoords().getClassifier()); @@ -75,14 +101,17 @@ private DevModeContext.ModuleInfo toModule(WorkspaceModule module) throws Bootst sourceDirectories.add(srcDir.getPath()); sourceParents.add(srcDir.getParent()); } - + String resourceDirectory = null; + if (module.getSourceSet().getResourceDirectory() != null) { + resourceDirectory = module.getSourceSet().getResourceDirectory().getPath(); + } return new DevModeContext.ModuleInfo(key, module.getArtifactCoords().getArtifactId(), module.getProjectRoot().getPath(), sourceDirectories, QuarkusModelHelper.getClassPath(module).toAbsolutePath().toString(), module.getSourceSourceSet().getResourceDirectory().toString(), - module.getSourceSet().getResourceDirectory().getPath(), + resourceDirectory, sourceParents, 
module.getBuildDir().toPath().resolve("generated-sources").toAbsolutePath().toString(), module.getBuildDir().toString()); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedDevModeMain.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedDevModeMain.java index 08fc3d718e0d7..6dc5207d5fa9d 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedDevModeMain.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedDevModeMain.java @@ -8,6 +8,7 @@ import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; +import java.net.BindException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; @@ -19,6 +20,7 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.function.BiConsumer; +import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -27,6 +29,7 @@ import org.jboss.logging.Logger; import io.quarkus.bootstrap.app.AugmentAction; +import io.quarkus.bootstrap.app.ClassChangeInformation; import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.app.RunningQuarkusApplication; import io.quarkus.bootstrap.app.StartupAction; @@ -40,7 +43,9 @@ import io.quarkus.deployment.CodeGenerator; import io.quarkus.deployment.builditem.ApplicationClassPredicateBuildItem; import io.quarkus.deployment.codegen.CodeGenData; +import io.quarkus.deployment.steps.ClassTransformingBuildStep; import io.quarkus.deployment.util.FSWatchUtil; +import io.quarkus.dev.console.DevConsoleManager; import io.quarkus.dev.spi.DevModeType; import io.quarkus.dev.spi.HotReplacementSetup; import io.quarkus.runner.bootstrap.AugmentActionImpl; @@ -58,7 +63,6 @@ public class IsolatedDevModeMain implements BiConsumer hotReplacementSetups = new ArrayList<>(); private static volatile RunningQuarkusApplication runner; static volatile Throwable deploymentProblem; - static volatile RuntimeUpdatesProcessor runtimeUpdatesProcessor; private static volatile CuratedApplication curatedApplication; private static volatile AugmentAction augmentAction; private static volatile boolean restarting; @@ -69,6 +73,7 @@ private synchronized void firstStart(QuarkusClassLoader deploymentClassLoader, L ClassLoader old = Thread.currentThread().getContextClassLoader(); try { + boolean augmentDone = false; //ok, we have resolved all the deps try { StartupAction start = augmentAction.createInitialRuntimeApplication(); @@ -94,54 +99,61 @@ public void accept(Integer integer) { System.in.read(); } System.out.println("Restarting..."); - runtimeUpdatesProcessor.checkForChangedClasses(); - restartApp(runtimeUpdatesProcessor.checkForFileChange()); + RuntimeUpdatesProcessor.INSTANCE.checkForChangedClasses(); + restartApp(RuntimeUpdatesProcessor.INSTANCE.checkForFileChange(), null); } catch (Exception e) { log.error("Failed to restart", e); } } }); - startCodeGenWatcher(deploymentClassLoader, codeGens); - + startCodeGenWatcher(deploymentClassLoader, codeGens, context.getBuildSystemProperties()); + augmentDone = true; runner = start.runMainClass(context.getArgs()); firstStartCompleted = true; } catch (Throwable t) { - deploymentProblem = t; - if (context.isAbortOnFailedStart()) { - log.error("Failed to start quarkus", t); - } else { - //we need to set this here, while we still have the correct TCCL - //this is so the config is still valid, and we can read HTTP config from 
application.properties - log.info("Attempting to start hot replacement endpoint to recover from previous Quarkus startup failure"); - if (runtimeUpdatesProcessor != null) { - Thread.currentThread().setContextClassLoader(curatedApplication.getBaseRuntimeClassLoader()); - - try { - if (!InitialConfigurator.DELAYED_HANDLER.isActivated()) { - Class cl = Thread.currentThread().getContextClassLoader() - .loadClass(LoggingSetupRecorder.class.getName()); - cl.getMethod("handleFailedStart").invoke(null); + Throwable rootCause = t; + while (rootCause.getCause() != null) { + rootCause = rootCause.getCause(); + } + if (!(rootCause instanceof BindException)) { + deploymentProblem = t; + if (!augmentDone) { + log.error("Failed to start quarkus", t); + } + if (!context.isAbortOnFailedStart()) { + //we need to set this here, while we still have the correct TCCL + //this is so the config is still valid, and we can read HTTP config from application.properties + log.info( + "Attempting to start hot replacement endpoint to recover from previous Quarkus startup failure"); + if (RuntimeUpdatesProcessor.INSTANCE != null) { + Thread.currentThread().setContextClassLoader(curatedApplication.getBaseRuntimeClassLoader()); + try { + if (!InitialConfigurator.DELAYED_HANDLER.isActivated()) { + Class cl = Thread.currentThread().getContextClassLoader() + .loadClass(LoggingSetupRecorder.class.getName()); + cl.getMethod("handleFailedStart").invoke(null); + } + RuntimeUpdatesProcessor.INSTANCE.startupFailed(); + } catch (Exception e) { + close(); + log.error("Failed to recover after failed start", e); + //this is the end of the road, we just exit + //generally we only hit this if something is already listening on the HTTP port + //or the system config is so broken we can't start HTTP + System.exit(1); } - runtimeUpdatesProcessor.startupFailed(); - } catch (Exception e) { - close(); - log.error("Failed to recover after failed start", e); - //this is the end of the road, we just exit - //generally we only hit this if something is already listening on the HTTP port - //or the system config is so broken we can't start HTTP - System.exit(1); } } } - } } finally { Thread.currentThread().setContextClassLoader(old); } } - private void startCodeGenWatcher(QuarkusClassLoader classLoader, List codeGens) { + private void startCodeGenWatcher(QuarkusClassLoader classLoader, List codeGens, + Map properties) { Collection watchers = new ArrayList<>(); for (CodeGenData codeGen : codeGens) { @@ -150,7 +162,7 @@ private void startCodeGenWatcher(QuarkusClassLoader classLoader, List changedResources) { + public void restartCallback(Set changedResources, ClassScanResult result) { + restartApp(changedResources, + new ClassChangeInformation(result.changedClassNames, result.deletedClassNames, result.addedClassNames)); + } + + public synchronized void restartApp(Set changedResources, ClassChangeInformation classChangeInformation) { restarting = true; stop(); Timing.restart(curatedApplication.getAugmentClassLoader()); @@ -169,14 +186,21 @@ public synchronized void restartApp(Set changedResources) { //ok, we have resolved all the deps try { - StartupAction start = augmentAction.reloadExistingApplication(firstStartCompleted, changedResources); + StartupAction start = augmentAction.reloadExistingApplication(firstStartCompleted, changedResources, + classChangeInformation); runner = start.runMainClass(context.getArgs()); firstStartCompleted = true; } catch (Throwable t) { deploymentProblem = t; - log.error("Failed to start quarkus", t); - 
Thread.currentThread().setContextClassLoader(curatedApplication.getAugmentClassLoader()); - LoggingSetupRecorder.handleFailedStart(); + Throwable rootCause = t; + while (rootCause.getCause() != null) { + rootCause = rootCause.getCause(); + } + if (!(rootCause instanceof BindException)) { + log.error("Failed to start quarkus", t); + Thread.currentThread().setContextClassLoader(curatedApplication.getAugmentClassLoader()); + LoggingSetupRecorder.handleFailedStart(); + } } } finally { restarting = false; @@ -202,7 +226,12 @@ private RuntimeUpdatesProcessor setupRuntimeCompilation(DevModeContext context, return null; } RuntimeUpdatesProcessor processor = new RuntimeUpdatesProcessor(appRoot, context, compiler, - devModeType, this::restartApp, null); + devModeType, this::restartCallback, null, new BiFunction() { + @Override + public byte[] apply(String s, byte[] bytes) { + return ClassTransformingBuildStep.transform(s, bytes); + } + }); for (HotReplacementSetup service : ServiceLoader.load(HotReplacementSetup.class, curatedApplication.getBaseRuntimeClassLoader())) { @@ -210,6 +239,8 @@ private RuntimeUpdatesProcessor setupRuntimeCompilation(DevModeContext context, service.setupHotDeployment(processor); processor.addHotReplacementSetup(service); } + DevConsoleManager.setQuarkusBootstrap(curatedApplication.getQuarkusBootstrap()); + DevConsoleManager.setHotReplacementContext(processor); return processor; } return null; @@ -250,7 +281,7 @@ public void close() { } finally { try { try { - runtimeUpdatesProcessor.close(); + RuntimeUpdatesProcessor.INSTANCE.close(); } catch (IOException e) { log.error("Failed to close compiler", e); } @@ -322,7 +353,8 @@ public boolean test(String s) { } }).produces(ApplicationClassPredicateBuildItem.class).build(); } - })); + }), + Collections.emptyList()); List codeGens = new ArrayList<>(); QuarkusClassLoader deploymentClassLoader = curatedApplication.createDeploymentClassLoader(); @@ -337,19 +369,20 @@ public boolean test(String s) { sourcePath -> module.addSourcePaths(singleton(sourcePath.toAbsolutePath().toString())))); } } - runtimeUpdatesProcessor = setupRuntimeCompilation(context, (Path) params.get(APP_ROOT), + RuntimeUpdatesProcessor.INSTANCE = setupRuntimeCompilation(context, (Path) params.get(APP_ROOT), (DevModeType) params.get(DevModeType.class.getName())); - if (runtimeUpdatesProcessor != null) { - runtimeUpdatesProcessor.checkForFileChange(); - runtimeUpdatesProcessor.checkForChangedClasses(); + if (RuntimeUpdatesProcessor.INSTANCE != null) { + RuntimeUpdatesProcessor.INSTANCE.checkForFileChange(); + RuntimeUpdatesProcessor.INSTANCE.checkForChangedClasses(); } firstStart(deploymentClassLoader, codeGens); // doStart(false, Collections.emptySet()); - if (deploymentProblem != null || runtimeUpdatesProcessor.getCompileProblem() != null) { + if (deploymentProblem != null || RuntimeUpdatesProcessor.INSTANCE.getCompileProblem() != null) { if (context.isAbortOnFailedStart()) { throw new RuntimeException( - deploymentProblem == null ? runtimeUpdatesProcessor.getCompileProblem() : deploymentProblem); + deploymentProblem == null ? 
RuntimeUpdatesProcessor.INSTANCE.getCompileProblem() + : deploymentProblem); } } Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedRemoteDevModeMain.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedRemoteDevModeMain.java index 630ffb6dc0314..e80b0411662f2 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedRemoteDevModeMain.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedRemoteDevModeMain.java @@ -22,6 +22,7 @@ import java.util.ServiceLoader; import java.util.Set; import java.util.function.BiConsumer; +import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Supplier; @@ -38,6 +39,7 @@ import io.quarkus.deployment.mutability.DevModeTask; import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.deployment.pkg.steps.JarResultBuildStep; +import io.quarkus.deployment.steps.ClassTransformingBuildStep; import io.quarkus.dev.spi.DevModeType; import io.quarkus.dev.spi.HotReplacementSetup; import io.quarkus.dev.spi.RemoteDevState; @@ -56,7 +58,6 @@ public class IsolatedRemoteDevModeMain implements BiConsumer hotReplacementSetups = new ArrayList<>(); static volatile Throwable deploymentProblem; - static volatile RuntimeUpdatesProcessor runtimeUpdatesProcessor; static volatile RemoteDevClient remoteDevClient; static volatile Closeable remoteDevClientSession; private static volatile CuratedApplication curatedApplication; @@ -130,6 +131,11 @@ private RuntimeUpdatesProcessor setupRuntimeCompilation(DevModeContext context, public void accept(DevModeContext.ModuleInfo moduleInfo, String s) { copiedStaticResources.computeIfAbsent(moduleInfo, ss -> new HashSet<>()).add(s); } + }, new BiFunction() { + @Override + public byte[] apply(String s, byte[] bytes) { + return ClassTransformingBuildStep.transform(s, bytes); + } }); for (HotReplacementSetup service : ServiceLoader.load(HotReplacementSetup.class, @@ -143,14 +149,14 @@ public void accept(DevModeContext.ModuleInfo moduleInfo, String s) { return null; } - void regenerateApplication(Set ignore) { + void regenerateApplication(Set ignore, ClassScanResult ignore2) { generateApplication(); } public void close() { try { try { - runtimeUpdatesProcessor.close(); + RuntimeUpdatesProcessor.INSTANCE.close(); } catch (IOException e) { log.error("Failed to close compiler", e); } @@ -190,11 +196,11 @@ public void accept(CuratedApplication o, Map o2) { } augmentAction = new AugmentActionImpl(curatedApplication); - runtimeUpdatesProcessor = setupRuntimeCompilation(context, appRoot); + RuntimeUpdatesProcessor.INSTANCE = setupRuntimeCompilation(context, appRoot); - if (runtimeUpdatesProcessor != null) { - runtimeUpdatesProcessor.checkForFileChange(); - runtimeUpdatesProcessor.checkForChangedClasses(); + if (RuntimeUpdatesProcessor.INSTANCE != null) { + RuntimeUpdatesProcessor.INSTANCE.checkForFileChange(); + RuntimeUpdatesProcessor.INSTANCE.checkForChangedClasses(); } JarResult result = generateApplication(); @@ -254,10 +260,10 @@ private RemoteDevClient.SyncResult runSync() { Set removed = new HashSet<>(); Map changed = new HashMap<>(); try { - boolean scanResult = runtimeUpdatesProcessor.doScan(true); + boolean scanResult = RuntimeUpdatesProcessor.INSTANCE.doScan(true); if (!scanResult && !copiedStaticResources.isEmpty()) { scanResult = true; - regenerateApplication(Collections.emptySet()); + regenerateApplication(Collections.emptySet(), new 
ClassScanResult()); } copiedStaticResources.clear(); if (scanResult) { @@ -291,7 +297,7 @@ public Set getRemovedFiles() { @Override public Throwable getProblem() { - return runtimeUpdatesProcessor.getDeploymentProblem(); + return RuntimeUpdatesProcessor.INSTANCE.getDeploymentProblem(); } }; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java index 149e86a37b4ba..47decbc6dbc70 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/JavaCompilationProvider.java @@ -19,6 +19,7 @@ import javax.tools.StandardLocation; import javax.tools.ToolProvider; +import org.jboss.logging.Logger; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; @@ -26,6 +27,8 @@ public class JavaCompilationProvider implements CompilationProvider { + private static final Logger log = Logger.getLogger(JavaCompilationProvider.class); + // -g is used to make the java compiler generate all debugging info // -parameters is used to generate metadata for reflection on method parameters // this is useful when people using debuggers against their hot-reloaded app @@ -71,12 +74,14 @@ public void compile(Set filesToCompile, Context context) { } for (Diagnostic diagnostic : diagnostics.getDiagnostics()) { - System.out.format("%s, line %d in %s", diagnostic.getMessage(null), diagnostic.getLineNumber(), + log.logf(diagnostic.getKind() == Diagnostic.Kind.ERROR ? Logger.Level.ERROR : Logger.Level.WARN, + "%s, line %d in %s", diagnostic.getMessage(null), diagnostic.getLineNumber(), diagnostic.getSource() == null ? "[unknown source]" : diagnostic.getSource().getName()); } if (!fileManagerDiagnostics.getDiagnostics().isEmpty()) { for (Diagnostic diagnostic : fileManagerDiagnostics.getDiagnostics()) { - System.out.format("%s, line %d in %s", diagnostic.getMessage(null), diagnostic.getLineNumber(), + log.logf(diagnostic.getKind() == Diagnostic.Kind.ERROR ? Logger.Level.ERROR : Logger.Level.WARN, + "%s, line %d in %s", diagnostic.getMessage(null), diagnostic.getLineNumber(), diagnostic.getSource() == null ? 
"[unknown source]" : diagnostic.getSource().getName()); } fileManager.close(); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/QuarkusDevModeLauncher.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/QuarkusDevModeLauncher.java new file mode 100644 index 0000000000000..b30ac9fac979a --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/QuarkusDevModeLauncher.java @@ -0,0 +1,462 @@ +package io.quarkus.deployment.dev; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.ObjectOutputStream; +import java.net.InetAddress; +import java.net.Socket; +import java.net.URI; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.jar.Attributes; +import java.util.jar.Manifest; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import org.apache.maven.shared.utils.cli.CommandLineUtils; + +import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.deployment.dev.DevModeContext.ModuleInfo; +import io.quarkus.runtime.util.JavaVersionUtil; +import io.quarkus.utilities.JavaBinFinder; + +public abstract class QuarkusDevModeLauncher { + + public class Builder> { + + protected Builder(String java) { + args = new ArrayList<>(); + final String javaTool = java == null ? JavaBinFinder.findBin() : java; + QuarkusDevModeLauncher.this.debug("Using javaTool: %s", javaTool); + args.add(javaTool); + + } + + @SuppressWarnings("unchecked") + public B preventnoverify(boolean preventnoverify) { + if (!preventnoverify) { + // in Java 13 and up, preventing verification is deprecated - see https://bugs.openjdk.java.net/browse/JDK-8218003 + // this test isn't absolutely correct in the sense that depending on the user setup, the actual Java binary + // that is used might be different that the one running Maven, but given how small of an impact this has + // it's probably better than running an extra command on 'javaTool' just to figure out the version + if (!JavaVersionUtil.isJava13OrHigher()) { + args.add("-Xverify:none"); + } + } + return (B) this; + } + + @SuppressWarnings("unchecked") + public B jvmArgs(String jvmArgs) { + args.add(jvmArgs); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B jvmArgs(List jvmArgs) { + args.addAll(jvmArgs); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B debug(String debug) { + QuarkusDevModeLauncher.this.debug = debug; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B debugPortOk(Boolean debugPortOk) { + QuarkusDevModeLauncher.this.debugPortOk = debugPortOk; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B suspend(String suspend) { + QuarkusDevModeLauncher.this.suspend = suspend; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B projectDir(File projectDir) { + QuarkusDevModeLauncher.this.projectDir = projectDir; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B buildDir(File buildDir) { + QuarkusDevModeLauncher.this.buildDir = buildDir; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B outputDir(File outputDir) { + QuarkusDevModeLauncher.this.outputDir = outputDir; + return (B) 
this; + } + + @SuppressWarnings("unchecked") + public B buildSystemProperties(Map buildSystemProperties) { + QuarkusDevModeLauncher.this.buildSystemProperties = buildSystemProperties; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B buildSystemProperty(String name, String value) { + QuarkusDevModeLauncher.this.buildSystemProperties.put(name, value); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B applicationName(String appName) { + QuarkusDevModeLauncher.this.applicationName = appName; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B applicationVersion(String appVersion) { + QuarkusDevModeLauncher.this.applicationVersion = appVersion; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B applicationArgs(String appArgs) { + QuarkusDevModeLauncher.this.applicationArgs = appArgs; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B sourceEncoding(String srcEncoding) { + QuarkusDevModeLauncher.this.sourceEncoding = srcEncoding; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B compilerOption(String option) { + compilerOptions.add(option); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B compilerOptions(List options) { + compilerOptions.addAll(options); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B compilerPluginArtifacts(List artifacts) { + compilerPluginArtifacts = artifacts; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B compilerPluginOptions(List options) { + compilerPluginOptions = options; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B sourceJavaVersion(String sourceJavaVersion) { + QuarkusDevModeLauncher.this.sourceJavaVersion = sourceJavaVersion; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B targetJavaVersion(String targetJavaVersion) { + QuarkusDevModeLauncher.this.targetJavaVersion = targetJavaVersion; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B watchedBuildFile(Path buildFile) { + QuarkusDevModeLauncher.this.buildFiles.add(buildFile); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B deleteDevJar(boolean deleteDevJar) { + QuarkusDevModeLauncher.this.deleteDevJar = deleteDevJar; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B baseName(String baseName) { + QuarkusDevModeLauncher.this.baseName = baseName; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B remoteDev(boolean remoteDev) { + QuarkusDevModeLauncher.this.remoteDev = remoteDev; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B localArtifact(AppArtifactKey localArtifact) { + localArtifacts.add(localArtifact); + return (B) this; + } + + public boolean isLocal(AppArtifactKey artifact) { + return localArtifacts.contains(artifact); + } + + @SuppressWarnings("unchecked") + public B mainModule(ModuleInfo mainModule) { + main = mainModule; + return (B) this; + } + + @SuppressWarnings("unchecked") + public B dependency(ModuleInfo module) { + dependencies.add(module); + return (B) this; + } + + @SuppressWarnings("unchecked") + public B classpathEntry(File f) { + classpath.add(f); + return (B) this; + } + + public B debugHost(String host) { + if ((null != host) && !host.isEmpty()) { + debugHost = host; + } + return (B) this; + } + + @SuppressWarnings("unchecked") + public R build() throws Exception { + prepare(); + return (R) QuarkusDevModeLauncher.this; + } + } + + private List args = new ArrayList<>(0); + private 
String debug; + private Boolean debugPortOk; + private String suspend; + private String debugHost = "localhost"; + private File projectDir; + private File buildDir; + private File outputDir; + private Map buildSystemProperties = new HashMap<>(0); + private String applicationName; + private String applicationVersion; + private String sourceEncoding; + private List compilerOptions = new ArrayList<>(0); + private List compilerPluginArtifacts; + private List compilerPluginOptions; + private String sourceJavaVersion; + private String targetJavaVersion; + private Set buildFiles = new HashSet<>(0); + private boolean deleteDevJar = true; + private String baseName; + private boolean remoteDev; + private String applicationArgs; + private Set localArtifacts = new HashSet<>(); + private ModuleInfo main; + private List dependencies = new ArrayList<>(0); + private List classpath = new ArrayList<>(0); + + protected QuarkusDevModeLauncher() { + } + + /** + * Attempts to prepare the dev mode runner. + */ + protected void prepare() throws Exception { + + // the following flags reduce startup time and are acceptable only for dev purposes + args.add("-XX:TieredStopAtLevel=1"); + + if (suspend != null) { + switch (suspend.toLowerCase(Locale.ENGLISH)) { + case "n": + case "false": { + suspend = "n"; + break; + } + case "y": + case "true": { + suspend = "y"; + break; + } + default: { + warn("Ignoring invalid value \"" + suspend + "\" for \"suspend\" param and defaulting to \"n\""); + suspend = "n"; + break; + } + } + } else { + suspend = "n"; + } + + if (debug == null) { + // debug mode not specified + // make sure 5005 is not used, we don't want to just fail if something else is using it + // we don't check this on restarts, as the previous process is still running + if (debugPortOk == null) { + try (Socket socket = new Socket(InetAddress.getByAddress(new byte[] { 127, 0, 0, 1 }), 5005)) { + error("Port 5005 in use, not starting in debug mode"); + debugPortOk = false; + } catch (IOException e) { + debugPortOk = true; + } + } + if (debugPortOk) { + args.add("-Xdebug"); + args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":5005,server=y,suspend=" + suspend); + } + } else if (debug.toLowerCase().equals("client")) { + args.add("-Xdebug"); + args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":5005,server=n,suspend=" + suspend); + } else if (debug.toLowerCase().equals("true")) { + args.add("-Xdebug"); + args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":5005,server=y,suspend=" + suspend); + } else if (!debug.toLowerCase().equals("false")) { + try { + int port = Integer.parseInt(debug); + if (port <= 0) { + throw new Exception("The specified debug port must be greater than 0"); + } + args.add("-Xdebug"); + args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":" + port + ",server=y,suspend=" + suspend); + } catch (NumberFormatException e) { + throw new Exception( + "Invalid value for debug parameter: " + debug + " must be true|false|client|{port}"); + } + } + + //build a class-path string for the base platform + //this stuff does not change + // Do not include URIs in the manifest, because some JVMs do not like that + StringBuilder classPathManifest = new StringBuilder(); + final DevModeContext devModeContext = new DevModeContext(); + for (Map.Entry e : System.getProperties().entrySet()) { + devModeContext.getSystemProperties().put(e.getKey().toString(), (String) e.getValue()); + } + devModeContext.setProjectDir(projectDir); + 
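
For reference, the debug setting handled earlier in prepare() accepts true, false, client, or a port number. A condensed sketch of that mapping, assuming a standalone helper (the real code additionally probes port 5005 for availability before listening on it, and wraps number-format failures in a "must be true|false|client|{port}" error):

```java
import java.util.ArrayList;
import java.util.List;

class JdwpArgsSketch {

    // Maps the dev-mode "debug" setting to the JDWP arguments added by the launcher.
    static List<String> jdwpArgs(String debug, String debugHost, String suspend) {
        List<String> args = new ArrayList<>();
        if (debug == null || "true".equalsIgnoreCase(debug)) {
            // listen for a debugger on the default port 5005
            args.add("-Xdebug");
            args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":5005,server=y,suspend=" + suspend);
        } else if ("client".equalsIgnoreCase(debug)) {
            // attach to a debugger that is already listening
            args.add("-Xdebug");
            args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":5005,server=n,suspend=" + suspend);
        } else if (!"false".equalsIgnoreCase(debug)) {
            int port = Integer.parseInt(debug); // anything else must be a port number
            if (port <= 0) {
                throw new IllegalArgumentException("The specified debug port must be greater than 0");
            }
            args.add("-Xdebug");
            args.add("-Xrunjdwp:transport=dt_socket,address=" + debugHost + ":" + port + ",server=y,suspend=" + suspend);
        }
        return args;
    }
}
```
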
devModeContext.getBuildSystemProperties().putAll(buildSystemProperties); + + // this is a minor hack to allow ApplicationConfig to be populated with defaults + devModeContext.getBuildSystemProperties().putIfAbsent("quarkus.application.name", applicationName); + devModeContext.getBuildSystemProperties().putIfAbsent("quarkus.application.version", applicationVersion); + + devModeContext.setSourceEncoding(sourceEncoding); + devModeContext.setCompilerOptions(compilerOptions); + + if (compilerPluginArtifacts != null) { + devModeContext.setCompilerPluginArtifacts(compilerPluginArtifacts); + } + if (compilerPluginOptions != null) { + devModeContext.setCompilerPluginsOptions(compilerPluginOptions); + } + + devModeContext.setSourceJavaVersion(sourceJavaVersion); + devModeContext.setTargetJvmVersion(targetJavaVersion); + + devModeContext.getLocalArtifacts().addAll(localArtifacts); + devModeContext.setApplicationRoot(main); + devModeContext.getAdditionalModules().addAll(dependencies); + + args.add("-Djava.util.logging.manager=org.jboss.logmanager.LogManager"); + + File tempFile = new File(buildDir, applicationName + "-dev.jar"); + tempFile.delete(); + // Only delete the -dev.jar on exit if requested + if (deleteDevJar) { + tempFile.deleteOnExit(); + } + debug("Executable jar: %s", tempFile.getAbsolutePath()); + + devModeContext.setBaseName(baseName); + devModeContext.setCacheDir(new File(buildDir, "transformer-cache").getAbsoluteFile()); + + // this is the jar file we will use to launch the dev mode main class + devModeContext.setDevModeRunnerJarFile(tempFile); + + if (remoteDev) { + devModeContext.setMode(QuarkusBootstrap.Mode.REMOTE_DEV_CLIENT); + devModeContext.setAlternateEntryPoint(IsolatedRemoteDevModeMain.class.getName()); + } + + try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(tempFile))) { + out.putNextEntry(new ZipEntry("META-INF/")); + Manifest manifest = new Manifest(); + manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); + + if (!classpath.isEmpty()) { + classpath.forEach(file -> { + final URI uri = file.toPath().toAbsolutePath().toUri(); + classPathManifest.append(uri).append(" "); + }); + } + manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classPathManifest.toString()); + manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, DevModeMain.class.getName()); + out.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF")); + manifest.write(out); + + out.putNextEntry(new ZipEntry(DevModeMain.DEV_MODE_CONTEXT)); + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + ObjectOutputStream obj = new ObjectOutputStream(new DataOutputStream(bytes)); + obj.writeObject(devModeContext); + obj.close(); + out.write(bytes.toByteArray()); + } + + outputDir.mkdirs(); + // if the --enable-preview flag was set, then we need to enable it when launching dev mode as well + if (devModeContext.isEnablePreview()) { + args.add(DevModeContext.ENABLE_PREVIEW_FLAG); + } + + args.add("-jar"); + args.add(tempFile.getAbsolutePath()); + if (applicationArgs != null) { + args.addAll(Arrays.asList(CommandLineUtils.translateCommandline(applicationArgs))); + } + } + + public Collection watchedBuildFiles() { + return buildFiles; + } + + public List args() { + return args; + } + + protected abstract boolean isDebugEnabled(); + + protected void debug(Object msg, Object... 
args) { + if (!isDebugEnabled()) { + return; + } + if (msg == null) { + return; + } + if (args.length == 0) { + debug(msg); + return; + } + debug(String.format(msg.toString(), args)); + } + + protected abstract void debug(Object msg); + + protected abstract void error(Object msg); + + protected abstract void warn(Object msg); +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/RuntimeUpdatesProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/RuntimeUpdatesProcessor.java index ff9c291504eee..6643cf3963bc3 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/RuntimeUpdatesProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/RuntimeUpdatesProcessor.java @@ -3,11 +3,13 @@ import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.toList; +import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.UncheckedIOException; +import java.lang.instrument.ClassDefinition; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -25,13 +27,21 @@ import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; +import java.util.function.BiFunction; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.eclipse.microprofile.config.ConfigProvider; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.Index; +import org.jboss.jandex.IndexView; +import org.jboss.jandex.Indexer; import org.jboss.logging.Logger; import io.quarkus.bootstrap.runner.Timing; +import io.quarkus.changeagent.ClassChangeAgent; import io.quarkus.deployment.util.FSWatchUtil; import io.quarkus.deployment.util.FileUtil; import io.quarkus.dev.spi.DevModeType; @@ -44,6 +54,8 @@ public class RuntimeUpdatesProcessor implements HotReplacementContext, Closeable private static final String CLASS_EXTENSION = ".class"; + static volatile RuntimeUpdatesProcessor INSTANCE; + private final Path applicationRoot; private final DevModeContext context; private final ClassLoaderCompiler compiler; @@ -53,6 +65,9 @@ public class RuntimeUpdatesProcessor implements HotReplacementContext, Closeable // file path -> isRestartNeeded private volatile Map watchedFilePaths = Collections.emptyMap(); + private volatile Predicate disableInstrumentationForClassPredicate = new AlwaysFalsePredicate<>(); + private volatile Predicate disableInstrumentationForIndexPredicate = new AlwaysFalsePredicate<>(); + /** * A first scan is considered done when we have visited all modules at least once. * This is useful in two ways. @@ -76,25 +91,34 @@ public class RuntimeUpdatesProcessor implements HotReplacementContext, Closeable private final List preScanSteps = new CopyOnWriteArrayList<>(); private final List>> noRestartChangesConsumers = new CopyOnWriteArrayList<>(); private final List hotReplacementSetup = new ArrayList<>(); - private final Consumer> restartCallback; + private final BiConsumer, ClassScanResult> restartCallback; private final BiConsumer copyResourceNotification; + private final BiFunction classTransformers; + + /** + * The index for the last successful start. Used to determine if the class has changed its structure + * and determine if it is eligible for an instrumentation based reload. 
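+ * <p>
+ * Based on the checks in doScan, an instrumentation-based reload is only attempted when the change agent
+ * exposes an Instrumentation instance, the scan found modified classes but no added or deleted ones, no
+ * restart-requiring watched file changed, every changed class still has the same structure as in this index,
+ * quarkus.live-reload.instrumentation has not been set to false, and none of the registered
+ * DisableInstrumentationFor*PredicateBuildItem predicates match; otherwise dev mode falls back to a full restart.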
+ */ + private static volatile IndexView lastStartIndex; public RuntimeUpdatesProcessor(Path applicationRoot, DevModeContext context, ClassLoaderCompiler compiler, - DevModeType devModeType, Consumer> restartCallback, - BiConsumer copyResourceNotification) { + DevModeType devModeType, BiConsumer, ClassScanResult> restartCallback, + BiConsumer copyResourceNotification, + BiFunction classTransformers) { this.applicationRoot = applicationRoot; this.context = context; this.compiler = compiler; this.devModeType = devModeType; this.restartCallback = restartCallback; this.copyResourceNotification = copyResourceNotification; + this.classTransformers = classTransformers; } @Override public Path getClassesDir() { //TODO: fix all these for (DevModeContext.ModuleInfo i : context.getAllModules()) { - return Paths.get(i.getResourcePath()); + return Paths.get(i.getClassesPath()); } return null; } @@ -158,6 +182,7 @@ public DevModeType getDevModeType() { @Override public boolean doScan(boolean userInitiated) throws IOException { + final long startNanoseconds = System.nanoTime(); for (Runnable step : preScanSteps) { try { @@ -167,20 +192,66 @@ public boolean doScan(boolean userInitiated) throws IOException { } } - boolean classChanged = checkForChangedClasses(); + ClassScanResult changedClassResults = checkForChangedClasses(); Set filesChanged = checkForFileChange(); + boolean configFileRestartNeeded = filesChanged.stream().map(watchedFilePaths::get).anyMatch(Boolean.TRUE::equals); + + boolean instrumentationChange = false; + if (ClassChangeAgent.getInstrumentation() != null && lastStartIndex != null && !configFileRestartNeeded + && devModeType != DevModeType.REMOTE_LOCAL_SIDE) { + //attempt to do an instrumentation based reload + //if only code has changed and not the class structure, then we can do a reload + //using the JDK instrumentation API (assuming we were started with the javaagent) + if (changedClassResults.deletedClasses.isEmpty() + && changedClassResults.addedClasses.isEmpty() + && !changedClassResults.changedClasses.isEmpty()) { + try { + Indexer indexer = new Indexer(); + //attempt to use the instrumentation API + ClassDefinition[] defs = new ClassDefinition[changedClassResults.changedClasses.size()]; + int index = 0; + for (Path i : changedClassResults.changedClasses) { + byte[] bytes = Files.readAllBytes(i); + String name = indexer.index(new ByteArrayInputStream(bytes)).name().toString(); + defs[index++] = new ClassDefinition(Thread.currentThread().getContextClassLoader().loadClass(name), + classTransformers.apply(name, bytes)); + } + Index current = indexer.complete(); + boolean ok = instrumentationEnabled() + && !disableInstrumentationForIndexPredicate.test(current); + if (ok) { + for (ClassInfo clazz : current.getKnownClasses()) { + ClassInfo old = lastStartIndex.getClassByName(clazz.name()); + if (!ClassComparisonUtil.isSameStructure(clazz, old) + || disableInstrumentationForClassPredicate.test(clazz)) { + ok = false; + break; + } + } + } + + if (ok) { + log.info("Application restart not required, replacing classes via instrumentation"); + ClassChangeAgent.getInstrumentation().redefineClasses(defs); + instrumentationChange = true; + } + } catch (Exception e) { + log.error("Failed to replace classes via instrumentation", e); + instrumentationChange = false; + } + } + } + //if there is a deployment problem we always restart on scan //this is because we can't setup the config file watches //in an ideal world we would just check every resource file for changes, however as everything is 
already //all broken we just assume the reason that they have refreshed is because they have fixed something //trying to watch all resource files is complex and this is likely a good enough solution for what is already an edge case - boolean restartNeeded = classChanged || (IsolatedDevModeMain.deploymentProblem != null && userInitiated); - if (!restartNeeded && !filesChanged.isEmpty()) { - restartNeeded = filesChanged.stream().map(watchedFilePaths::get).anyMatch(Boolean.TRUE::equals); - } + boolean restartNeeded = !instrumentationChange && (changedClassResults.isChanged() + || (IsolatedDevModeMain.deploymentProblem != null && userInitiated) || configFileRestartNeeded); if (restartNeeded) { - restartCallback.accept(filesChanged); + restartCallback.accept(filesChanged, changedClassResults); log.infof("Hot replace total time: %ss ", Timing.convertToBigDecimalSeconds(System.nanoTime() - startNanoseconds)); return true; } else if (!filesChanged.isEmpty()) { @@ -193,10 +264,24 @@ public boolean doScan(boolean userInitiated) throws IOException { } log.infof("Files changed but restart not needed - notified extensions in: %ss ", Timing.convertToBigDecimalSeconds(System.nanoTime() - startNanoseconds)); + } else if (instrumentationChange) { + log.infof("Hot replace performed via instrumentation, no restart needed, total time: %ss ", + Timing.convertToBigDecimalSeconds(System.nanoTime() - startNanoseconds)); } return false; } + private Boolean instrumentationEnabled() { + ClassLoader old = Thread.currentThread().getContextClassLoader(); + try { + Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); + return ConfigProvider.getConfig() + .getOptionalValue("quarkus.live-reload.instrumentation", boolean.class).orElse(true); + } finally { + Thread.currentThread().setContextClassLoader(old); + } + } + @Override public void addPreScanStep(Runnable runnable) { preScanSteps.add(runnable); @@ -237,8 +322,8 @@ public Set syncState(Map fileHashes) { } } - boolean checkForChangedClasses() throws IOException { - boolean hasChanges = false; + ClassScanResult checkForChangedClasses() throws IOException { + ClassScanResult classScanResult = new ClassScanResult(); boolean ignoreFirstScanChanges = !firstScanDone; for (DevModeContext.ModuleInfo module : context.getAllModules()) { @@ -271,31 +356,27 @@ && sourceFileWasRecentModified(p, ignoreFirstScanChanges)) compileProblem = null; } catch (Exception e) { compileProblem = e; - return false; + return new ClassScanResult(); } } } - if (checkForClassFilesChangesInModule(module, moduleChangedSourceFilePaths, ignoreFirstScanChanges)) { - hasChanges = true; - } + checkForClassFilesChangesInModule(module, moduleChangedSourceFilePaths, ignoreFirstScanChanges, classScanResult); } this.firstScanDone = true; - return hasChanges; + return classScanResult; } public Throwable getCompileProblem() { return compileProblem; } - private boolean checkForClassFilesChangesInModule(DevModeContext.ModuleInfo module, List moduleChangedSourceFiles, - boolean isInitialRun) { - boolean hasChanges = !moduleChangedSourceFiles.isEmpty(); - + private void checkForClassFilesChangesInModule(DevModeContext.ModuleInfo module, List moduleChangedSourceFiles, + boolean isInitialRun, ClassScanResult classScanResult) { if (module.getClassesPath() == null) { - return hasChanges; + return; } try { @@ -319,21 +400,26 @@ private boolean checkForClassFilesChangesInModule(DevModeContext.ModuleInfo modu // Source file has been deleted. 
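instrumentationEnabled() above reads quarkus.live-reload.instrumentation while temporarily switching the thread context class loader, so that MicroProfile Config resolves against the class loader that actually holds the configuration. The same pattern, restated as a small self-contained helper (the class name here is hypothetical):

```java
import java.util.function.Supplier;

import org.eclipse.microprofile.config.ConfigProvider;

final class TcclExample {

    // Run a task with a specific thread context class loader, always restoring the previous one.
    static <T> T withContextClassLoader(ClassLoader cl, Supplier<T> task) {
        Thread thread = Thread.currentThread();
        ClassLoader old = thread.getContextClassLoader();
        thread.setContextClassLoader(cl);
        try {
            return task.get();
        } finally {
            thread.setContextClassLoader(old);
        }
    }

    // Mirrors the lookup in instrumentationEnabled() above; defaults to true when the key is absent.
    static boolean instrumentationAllowed() {
        return withContextClassLoader(TcclExample.class.getClassLoader(),
                () -> ConfigProvider.getConfig()
                        .getOptionalValue("quarkus.live-reload.instrumentation", Boolean.class)
                        .orElse(true));
    }
}
```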
Delete class and restart cleanUpClassFile(classFilePath); sourceFileTimestamps.remove(sourceFilePath); - hasChanges = true; + classScanResult.addDeletedClass(moduleClassesPath, classFilePath); } else { classFilePathToSourceFilePath.put(classFilePath, sourceFilePath); - if (classFileWasRecentModified(classFilePath, isInitialRun)) { + if (classFileWasAdded(classFilePath, isInitialRun)) { + // At least one class was recently modified. Restart. + classScanResult.addAddedClass(moduleClassesPath, classFilePath); + } else if (classFileWasRecentModified(classFilePath, isInitialRun)) { // At least one class was recently modified. Restart. - hasChanges = true; + classScanResult.addChangedClass(moduleClassesPath, classFilePath); } else if (moduleChangedSourceFiles.contains(sourceFilePath)) { - // Source file has been modified, we delete the .class files as they are going to - // be recompiled anyway, this allows for simple cleanup of inner classes + // Source file has been modified, but not the class file + // must be a removed inner class cleanUpClassFile(classFilePath); - hasChanges = true; + classScanResult.addDeletedClass(moduleClassesPath, classFilePath); } } + } else if (classFileWasAdded(classFilePath, isInitialRun)) { + classScanResult.addAddedClass(moduleClassesPath, classFilePath); } else if (classFileWasRecentModified(classFilePath, isInitialRun)) { - hasChanges = true; + classScanResult.addChangedClass(moduleClassesPath, classFilePath); } } } @@ -341,8 +427,6 @@ private boolean checkForClassFilesChangesInModule(DevModeContext.ModuleInfo modu } catch (IOException e) { throw new UncheckedIOException(e); } - - return hasChanges; } private Path retrieveSourceFilePathForClassFile(Path classFilePath, List moduleChangedSourceFiles, @@ -386,7 +470,7 @@ Set checkForFileChange() { outputPath = rootPath; doCopy = false; } - if (rootPath == null) { + if (rootPath == null || outputPath == null) { continue; } Path root = Paths.get(rootPath); @@ -484,6 +568,18 @@ private boolean classFileWasRecentModified(final Path classFilePath, boolean ign return checkIfFileModified(classFilePath, classFileChangeTimeStamps, ignoreFirstScanChanges); } + private boolean classFileWasAdded(final Path classFilePath, boolean ignoreFirstScanChanges) { + final Long lastRecordedChange = classFileChangeTimeStamps.get(classFilePath); + if (lastRecordedChange == null) { + try { + classFileChangeTimeStamps.put(classFilePath, Files.getLastModifiedTime(classFilePath).toMillis()); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + return lastRecordedChange == null && !ignoreFirstScanChanges; + } + private boolean checkIfFileModified(Path path, Map pathModificationTimes, boolean ignoreFirstScanChanges) { try { final long lastModificationTime = Files.getLastModifiedTime(path).toMillis(); @@ -505,6 +601,18 @@ private boolean checkIfFileModified(Path path, Map pathModificationT } } + public RuntimeUpdatesProcessor setDisableInstrumentationForClassPredicate( + Predicate disableInstrumentationForClassPredicate) { + this.disableInstrumentationForClassPredicate = disableInstrumentationForClassPredicate; + return this; + } + + public RuntimeUpdatesProcessor setDisableInstrumentationForIndexPredicate( + Predicate disableInstrumentationForIndexPredicate) { + this.disableInstrumentationForIndexPredicate = disableInstrumentationForIndexPredicate; + return this; + } + public RuntimeUpdatesProcessor setWatchedFilePaths(Map watchedFilePaths) { this.watchedFilePaths = watchedFilePaths; watchedFileTimestamps.clear(); @@ -544,6 
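classFileWasAdded() and checkIfFileModified() above both hinge on a map of recorded last-modified timestamps: a path with no recorded timestamp counts as added, a path whose timestamp moved past the recorded one counts as modified. A stripped-down sketch of that bookkeeping (illustrative only, not the actual implementation):

```java
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class ChangeTrackerExample {

    private final Map<Path, Long> timestamps = new ConcurrentHashMap<>();

    // "Added": we have never recorded a timestamp for this file before.
    boolean wasAdded(Path file) {
        return timestamps.putIfAbsent(file, lastModified(file)) == null;
    }

    // "Modified": the file's last-modified time moved past the recorded one.
    boolean wasModified(Path file) {
        long current = lastModified(file);
        Long recorded = timestamps.put(file, current);
        return recorded != null && current > recorded;
    }

    private static long lastModified(Path file) {
        try {
            return Files.getLastModifiedTime(file).toMillis();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
```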
+652,12 @@ public void startupFailed() { for (HotReplacementSetup i : hotReplacementSetup) { i.handleFailedInitialStart(); } + //if startup failed we always do a class loader based restart + lastStartIndex = null; + } + + public static void setLastStartIndex(IndexView lastStartIndex) { + RuntimeUpdatesProcessor.lastStartIndex = lastStartIndex; } @Override @@ -551,4 +665,5 @@ public void close() throws IOException { compiler.close(); FSWatchUtil.shutdown(); } + } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ide/EffectiveIdeBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/ide/EffectiveIdeBuildItem.java new file mode 100644 index 0000000000000..d156b5222ed82 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/ide/EffectiveIdeBuildItem.java @@ -0,0 +1,19 @@ +package io.quarkus.deployment.ide; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * Contains the IDE to be opened when a request to open a class is made + */ +public final class EffectiveIdeBuildItem extends SimpleBuildItem { + + private final Ide ide; + + public EffectiveIdeBuildItem(Ide ide) { + this.ide = ide; + } + + public Ide getIde() { + return ide; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ide/Ide.java b/core/deployment/src/main/java/io/quarkus/deployment/ide/Ide.java new file mode 100644 index 0000000000000..e64c591d39da8 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/ide/Ide.java @@ -0,0 +1,23 @@ +package io.quarkus.deployment.ide; + +public enum Ide { + + IDEA("idea"), + ECLIPSE("eclipse"), + VSCODE("code"), + NETBEANS("netbeans"); + + private String executable; + + private Ide(String executable) { + this.executable = executable; + } + + public String getExecutable() { + return executable; + } + + public void setExecutable(String executable) { + this.executable = executable; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeConfig.java new file mode 100644 index 0000000000000..4bbc0aa105d52 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeConfig.java @@ -0,0 +1,23 @@ +package io.quarkus.deployment.ide; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot +public class IdeConfig { + + /** + * The Ide to use to open files from the DevUI. + * {@code auto} means that Quarkus will attempt to determine the Ide being used. 
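EffectiveIdeBuildItem above carries the IDE whose executable (see Ide.getExecutable()) is eventually used to open a class from the Dev UI. The actual invocation is outside this hunk; as a purely illustrative sketch, launching such an executable with a file argument could look like the following (real IDEs differ in the arguments and flags they accept):

```java
import java.io.IOException;

final class OpenInIdeExample {

    // Illustrative only: start the detected IDE binary and pass it the file to open.
    static void open(String ideExecutable, String sourceFile) throws IOException {
        new ProcessBuilder(ideExecutable, sourceFile)
                .inheritIO()
                .start();
    }
}
```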
+ */ + @ConfigItem(defaultValue = "auto") + public Target target; + + enum Target { + auto, + idea, + vscode, + eclipse, + netbeans + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeFileBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeFileBuildItem.java new file mode 100644 index 0000000000000..ad2a226d1a9c2 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeFileBuildItem.java @@ -0,0 +1,22 @@ +package io.quarkus.deployment.ide; + +import java.util.Set; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * Contains the set of IDEs that could be potentially linked to project based + * on the files present in the project + */ +final class IdeFileBuildItem extends SimpleBuildItem { + + private final Set detectedIDEs; + + IdeFileBuildItem(Set detectedIDEs) { + this.detectedIDEs = detectedIDEs; + } + + Set getDetectedIDEs() { + return detectedIDEs; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeProcessor.java new file mode 100644 index 0000000000000..aedf997e0e3c7 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeProcessor.java @@ -0,0 +1,249 @@ +package io.quarkus.deployment.ide; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Stream; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.pkg.builditem.BuildSystemTargetBuildItem; +import io.quarkus.dev.spi.DevModeType; +import io.quarkus.runtime.util.JavaVersionUtil; + +public class IdeProcessor { + + private static Map> IDE_MARKER_FILES = new HashMap<>(); + private static Map, Ide> IDE_PROCESSES = new HashMap<>(); + private static Map> IDE_ARGUMENTS_EXEC_INDICATOR = new HashMap<>(); + + static { + IDE_MARKER_FILES.put(".idea", Collections.singletonList(Ide.IDEA)); + IDE_MARKER_FILES.put(".project", Arrays.asList(Ide.VSCODE, Ide.ECLIPSE)); + IDE_MARKER_FILES.put("nbactions.xml", Collections.singletonList(Ide.NETBEANS)); + IDE_MARKER_FILES.put("nb-configuration.xml", Collections.singletonList(Ide.NETBEANS)); + + IDE_MARKER_FILES = Collections.unmodifiableMap(IDE_MARKER_FILES); + + IDE_PROCESSES.put((processInfo -> processInfo.containInCommand("idea")), Ide.IDEA); + IDE_PROCESSES.put((processInfo -> processInfo.containInCommand("code")), Ide.VSCODE); + IDE_PROCESSES.put((processInfo -> processInfo.containInCommand("eclipse")), Ide.ECLIPSE); + IDE_PROCESSES.put( + (processInfo -> processInfo.containInCommandWithArgument("java", "netbeans")), + Ide.NETBEANS); + + IDE_ARGUMENTS_EXEC_INDICATOR.put(Ide.NETBEANS, (ProcessInfo processInfo) -> { + String platform = processInfo.getArgumentValue("-Dnetbeans.home"); + if (platform != null && !platform.isEmpty()) { + String os = System.getProperty("os.name"); + if (os.startsWith("Windows") || os.startsWith("windows")) { + platform = platform.replace("platform", "bin/netbeans.exe"); + } else { + platform = platform.replace("platform", "bin/netbeans"); + } + return 
platform; + } + return null; + }); + + IDE_PROCESSES = Collections.unmodifiableMap(IDE_PROCESSES); + } + + @BuildStep + public EffectiveIdeBuildItem effectiveIde(LaunchModeBuildItem launchModeBuildItem, IdeConfig ideConfig, + IdeFileBuildItem ideFile, + IdeRunningProcessBuildItem ideRunningProcess) { + if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { + return null; + } + Ide result = null; + if (ideConfig.target == IdeConfig.Target.auto) { + + // the idea here is to auto-detect the special files that IDEs create + // and also the running IDE process if need be + + if (ideFile.getDetectedIDEs().size() == 1) { + result = ideFile.getDetectedIDEs().iterator().next(); + } else { + Set runningIdes = ideRunningProcess.getDetectedIDEs(); + if (runningIdes.size() == 1) { + result = runningIdes.iterator().next(); + } else { + List matches = new ArrayList<>(); + for (Ide file : ideFile.getDetectedIDEs()) { + for (Ide process : runningIdes) { + if (file == process) { + matches.add(file); + } + } + } + if (matches.size() == 1) { + result = matches.get(0); + } + } + } + } else { + if (ideConfig.target == IdeConfig.Target.idea) { + result = Ide.IDEA; + } else if (ideConfig.target == IdeConfig.Target.eclipse) { + result = Ide.ECLIPSE; + } else if (ideConfig.target == IdeConfig.Target.vscode) { + result = Ide.VSCODE; + } else if (ideConfig.target == IdeConfig.Target.netbeans) { + result = Ide.NETBEANS; + } + } + + if (result == null) { + return null; + } + + return new EffectiveIdeBuildItem(result); + } + + @BuildStep + public IdeFileBuildItem detectIdeFiles(LaunchModeBuildItem launchModeBuildItem, + BuildSystemTargetBuildItem buildSystemTarget) { + if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { + return null; + } + Set result = new HashSet<>(2); + Path projectRoot = buildSystemTarget.getOutputDirectory().getParent(); + IDE_MARKER_FILES.forEach((file, ides) -> { + if (Files.exists(projectRoot.resolve(file))) { + result.addAll(ides); + } + }); + return new IdeFileBuildItem(result); + } + + @BuildStep + public IdeRunningProcessBuildItem detectRunningIdeProcesses(LaunchModeBuildItem launchModeBuildItem) { + if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { + return null; + } + Set result = new HashSet<>(4); + List processInfos = ProcessUtil.runningProcesses(); + for (ProcessInfo processInfo : processInfos) { + for (Map.Entry, Ide> entry : IDE_PROCESSES.entrySet()) { + if (entry.getKey().test(processInfo)) { + Ide ide = entry.getValue(); + + if (IDE_ARGUMENTS_EXEC_INDICATOR.containsKey(ide)) { + Function execIndicator = IDE_ARGUMENTS_EXEC_INDICATOR.get(ide); + String executeLine = execIndicator.apply(processInfo); + if (executeLine != null) { + ide.setExecutable(executeLine); + } + } + result.add(ide); + break; + } + } + } + return new IdeRunningProcessBuildItem(result); + } + + // TODO: remove when we move to Java 11 and just call the methods of 'java.lang.ProcessHandle' + private static class ProcessUtil { + + /** + * Returns a list of running processes + * Only works for Java 11+ + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) + public static List runningProcesses() { + if (!JavaVersionUtil.isJava11OrHigher()) { + return Collections.emptyList(); + } + // we can't use ProcessHandle directly as it is Java 9+, so we need to do reflection to the get the info we need + try { + Class processHandlerClass = Class.forName("java.lang.ProcessHandle"); + Method allProcessesMethod = 
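The surrounding ProcessUtil goes through reflection only because this code still compiles against Java 8; as the TODO above notes, it can be replaced by direct calls to java.lang.ProcessHandle once the baseline moves to Java 11. For reference, the non-reflective Java 9+ equivalent of runningProcesses() is roughly:

```java
import java.util.List;
import java.util.stream.Collectors;

final class ProcessHandleExample {

    // Java 9+ only: enumerate running processes without reflection.
    // Info.command() and Info.arguments() correspond to the ProcessInfo fields used below.
    static List<ProcessHandle.Info> runningProcesses() {
        return ProcessHandle.allProcesses()
                .map(ProcessHandle::info)
                .filter(info -> info.command().isPresent())
                .collect(Collectors.toList());
    }
}
```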
processHandlerClass.getMethod("allProcesses"); + Method processHandleInfoMethod = processHandlerClass.getMethod("info"); + Class processHandleInfoClass = Class.forName("java.lang.ProcessHandle$Info"); + Method processHandleInfoCommandMethod = processHandleInfoClass.getMethod("command"); + Method processHandleInfoArgumentsMethod = processHandleInfoClass.getMethod("arguments"); + Stream allProcessesResult = (Stream) allProcessesMethod.invoke(null); + List result = new ArrayList<>(); + allProcessesResult.forEach(o -> { + try { + Object processHandleInfo = processHandleInfoMethod.invoke(o); + Optional command = (Optional) processHandleInfoCommandMethod.invoke(processHandleInfo); + if (command.isPresent()) { + Optional arguments = (Optional) processHandleInfoArgumentsMethod + .invoke(processHandleInfo); + result.add(new ProcessInfo(command.get(), arguments.orElse(null))); + } + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + }); + return result; + } catch (Exception e) { + throw new RuntimeException("Unable to determine running processes", e); + } + } + } + + private static class ProcessInfo { + // the executable pathname of the process. + private final String command; + private final String[] arguments; + + public ProcessInfo(String command, String[] arguments) { + this.command = command; + this.arguments = arguments; + } + + public String getCommand() { + return command; + } + + public String[] getArguments() { + return arguments; + } + + public boolean containInCommandWithArgument(String command, String argument) { + return containInCommand(command) && containInArguments(argument); + } + + public boolean containInCommand(String value) { + return this.command.contains(value); + } + + public boolean containInArguments(String value) { + if (arguments != null) { + for (String argument : arguments) { + if (argument.contains(value)) { + return true; + } + } + } + return false; + } + + public String getArgumentValue(String argumentKey) { + if (arguments != null) { + for (String argument : arguments) { + if (argument.startsWith(argumentKey) && argument.contains("=")) { + return argument.substring(argument.indexOf("=") + 1); + } + } + } + return null; + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeRunningProcessBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeRunningProcessBuildItem.java new file mode 100644 index 0000000000000..83f74169d54e9 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/ide/IdeRunningProcessBuildItem.java @@ -0,0 +1,21 @@ +package io.quarkus.deployment.ide; + +import java.util.Set; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * Contains the set of IDEs that are running as processes + */ +final class IdeRunningProcessBuildItem extends SimpleBuildItem { + + private final Set detectedIDEs; + + IdeRunningProcessBuildItem(Set detectedIDEs) { + this.detectedIDEs = detectedIDEs; + } + + Set getDetectedIDEs() { + return detectedIDEs; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java index c92197804a9e8..4154dd75786cf 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java @@ -2,28 +2,22 @@ import java.io.FileInputStream; import 
java.io.IOException; -import java.io.InputStream; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.ProviderNotFoundException; import java.util.ArrayList; -import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.jar.JarEntry; -import java.util.jar.JarFile; import java.util.stream.Stream; -import java.util.zip.ZipEntry; import org.jboss.jandex.CompositeIndex; import org.jboss.jandex.Index; -import org.jboss.jandex.IndexReader; import org.jboss.jandex.IndexView; import org.jboss.jandex.Indexer; import org.jboss.logging.Logger; @@ -45,6 +39,8 @@ import io.quarkus.deployment.builditem.LiveReloadBuildItem; import io.quarkus.deployment.builditem.QuarkusBuildCloseablesBuildItem; import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; +import io.quarkus.runtime.annotations.ConfigDocMapKey; +import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; @@ -53,20 +49,18 @@ public class ApplicationArchiveBuildStep { private static final Logger LOGGER = Logger.getLogger(ApplicationArchiveBuildStep.class); - private static final String JANDEX_INDEX = "META-INF/jandex.idx"; - - // At least Jandex 2.1 is needed - private static final int REQUIRED_INDEX_VERSION = 8; - IndexDependencyConfiguration config; @ConfigRoot(phase = ConfigPhase.BUILD_TIME) static final class IndexDependencyConfiguration { /** - * Artifacts on the class path that should also be indexed, which will allow classes in the index to be - * processed by Quarkus processors + * Artifacts on the classpath that should also be indexed. + *

+ * Their classes will be in the index and processed by Quarkus processors. */ @ConfigItem(name = ConfigItem.PARENT) + @ConfigDocSection + @ConfigDocMapKey("dependency-name") Map indexDependency; } @@ -120,7 +114,7 @@ private List scanForOtherIndexes(QuarkusBuildCloseablesBuild //get paths that are included via marker files Set markers = new HashSet<>(applicationArchiveFiles); - markers.add(JANDEX_INDEX); + markers.add(IndexingUtil.JANDEX_INDEX); addMarkerFilePaths(markers, root, curateOutcomeBuildItem, indexedPaths, appArchives, buildCloseables, classLoader, indexCache); @@ -247,18 +241,6 @@ private static boolean containsMarker(Path dir, Set applicationArchiveFi } private static Index handleFilePath(Path path) throws IOException { - Path existing = path.resolve(JANDEX_INDEX); - if (Files.exists(existing)) { - try (FileInputStream in = new FileInputStream(existing.toFile())) { - IndexReader reader = new IndexReader(in); - if (reader.getIndexVersion() < REQUIRED_INDEX_VERSION) { - LOGGER.warnf("Re-indexing %s - at least Jandex 2.1 must be used to index an application dependency", path); - return indexFilePath(path); - } else { - return reader.read(); - } - } - } return indexFilePath(path); } @@ -278,26 +260,12 @@ private static Index indexFilePath(Path path) throws IOException { return indexer.complete(); } - private static Index handleJarPath(Path path, IndexCache indexCache) throws IOException { + private static Index handleJarPath(Path path, IndexCache indexCache) { return indexCache.cache.computeIfAbsent(path, new Function() { @Override public Index apply(Path path) { - try (JarFile file = new JarFile(path.toFile())) { - ZipEntry existing = file.getEntry(JANDEX_INDEX); - if (existing != null) { - try (InputStream in = file.getInputStream(existing)) { - IndexReader reader = new IndexReader(in); - if (reader.getIndexVersion() < REQUIRED_INDEX_VERSION) { - LOGGER.warnf( - "Re-indexing %s - at least Jandex 2.1 must be used to index an application dependency", - path); - return indexJar(file); - } else { - return reader.read(); - } - } - } - return indexJar(file); + try { + return IndexingUtil.indexJar(path); } catch (IOException e) { throw new RuntimeException("Failed to process " + path, e); } @@ -305,20 +273,6 @@ public Index apply(Path path) { }); } - private static Index indexJar(JarFile file) throws IOException { - Indexer indexer = new Indexer(); - Enumeration e = file.entries(); - while (e.hasMoreElements()) { - JarEntry entry = e.nextElement(); - if (entry.getName().endsWith(".class")) { - try (InputStream inputStream = file.getInputStream(entry)) { - indexer.index(inputStream); - } - } - } - return indexer.complete(); - } - /** * When running in hot deployment mode we know that java archives will never change, there is no need * to re-index them each time. We cache them here to reduce the hot reload time. diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java index 6b00626499354..bc9de09855d18 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java @@ -9,19 +9,19 @@ public class IndexDependencyConfig { /** - * The maven groupId of the artifact to index + * The maven groupId of the artifact. */ @ConfigItem public String groupId; /** - * The maven artifactId of the artifact to index + * The maven artifactId of the artifact. 
*/ @ConfigItem public String artifactId; /** - * The maven classifier of the artifact to index + * The maven classifier of the artifact. */ @ConfigItem public Optional classifier; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexWrapper.java b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexWrapper.java new file mode 100644 index 0000000000000..05a7ca734d7bb --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexWrapper.java @@ -0,0 +1,216 @@ +package io.quarkus.deployment.index; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; +import org.jboss.jandex.Index; +import org.jboss.jandex.IndexView; +import org.jboss.jandex.Indexer; +import org.jboss.jandex.Type; +import org.jboss.logging.Logger; + +import io.quarkus.deployment.steps.CombinedIndexBuildStep; + +/** + * This wrapper is used to index JDK classes on demand. + */ +public class IndexWrapper implements IndexView { + + private static final Logger LOGGER = Logger.getLogger(CombinedIndexBuildStep.class); + + private final IndexView index; + private final ClassLoader deploymentClassLoader; + final Map> additionalClasses; + + public IndexWrapper(IndexView index, ClassLoader deploymentClassLoader, PersistentClassIndex persistentClassIndex) { + this.index = index; + this.deploymentClassLoader = deploymentClassLoader; + this.additionalClasses = persistentClassIndex.additionalClasses; + } + + @Override + public Collection getKnownClasses() { + if (additionalClasses.isEmpty()) { + return index.getKnownClasses(); + } + Collection known = index.getKnownClasses(); + Collection additional = additionalClasses.values().stream().filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); + List all = new ArrayList<>(known.size() + additional.size()); + all.addAll(known); + all.addAll(additional); + return all; + } + + @Override + public ClassInfo getClassByName(DotName className) { + ClassInfo classInfo = index.getClassByName(className); + if (classInfo == null) { + classInfo = additionalClasses.computeIfAbsent(className, this::computeAdditional).orElse(null); + } + return classInfo; + } + + @Override + public Collection getKnownDirectSubclasses(DotName className) { + if (additionalClasses.isEmpty()) { + return index.getKnownDirectSubclasses(className); + } + Set directSubclasses = new HashSet(index.getKnownDirectSubclasses(className)); + for (Optional additional : additionalClasses.values()) { + if (additional.isPresent() && className.equals(additional.get().superName())) { + directSubclasses.add(additional.get()); + } + } + return directSubclasses; + } + + @Override + public Collection getAllKnownSubclasses(DotName className) { + if (additionalClasses.isEmpty()) { + return index.getAllKnownSubclasses(className); + } + final Set allKnown = new HashSet(); + final Set processedClasses = new HashSet(); + getAllKnownSubClasses(className, allKnown, processedClasses); + return allKnown; + } + + @Override + public Collection getKnownDirectImplementors(DotName className) { + if (additionalClasses.isEmpty()) { + return index.getKnownDirectImplementors(className); 
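IndexWrapper implements the same Jandex IndexView contract as a plain index, merging in classes that are indexed lazily from the class loader. For readers new to the API, a small self-contained example of building and querying an index (the Shape/Circle classes are arbitrary placeholders):

```java
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;

import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.Indexer;

final class JandexQueryExample {

    static abstract class Shape { }

    static class Circle extends Shape { }

    // Index the given classes by reading their bytecode from the class path.
    static Index indexOf(Class<?>... classes) throws IOException {
        Indexer indexer = new Indexer();
        for (Class<?> clazz : classes) {
            String resource = clazz.getName().replace('.', '/') + ".class";
            try (InputStream in = JandexQueryExample.class.getClassLoader().getResourceAsStream(resource)) {
                indexer.index(in);
            }
        }
        return indexer.complete();
    }

    public static void main(String[] args) throws IOException {
        Index index = indexOf(Shape.class, Circle.class);
        Collection<ClassInfo> subclasses = index.getAllKnownSubclasses(DotName.createSimple(Shape.class.getName()));
        subclasses.forEach(info -> System.out.println(info.name())); // prints the Circle class name
    }
}
```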
+ } + Set directImplementors = new HashSet(index.getKnownDirectImplementors(className)); + for (Optional additional : additionalClasses.values()) { + if (!additional.isPresent()) { + continue; + } + for (Type interfaceType : additional.get().interfaceTypes()) { + if (className.equals(interfaceType.name())) { + directImplementors.add(additional.get()); + break; + } + } + } + return directImplementors; + } + + @Override + public Collection getAllKnownImplementors(DotName interfaceName) { + if (additionalClasses.isEmpty()) { + return index.getAllKnownImplementors(interfaceName); + } + final Set allKnown = new HashSet(); + final Set subInterfacesToProcess = new HashSet(); + final Set processedClasses = new HashSet(); + subInterfacesToProcess.add(interfaceName); + while (!subInterfacesToProcess.isEmpty()) { + final Iterator toProcess = subInterfacesToProcess.iterator(); + DotName name = toProcess.next(); + toProcess.remove(); + processedClasses.add(name); + getKnownImplementors(name, allKnown, subInterfacesToProcess, processedClasses); + } + return allKnown; + } + + @Override + public Collection getAnnotations(DotName annotationName) { + return index.getAnnotations(annotationName); + } + + @Override + public Collection getAnnotationsWithRepeatable(DotName annotationName, IndexView index) { + return this.index.getAnnotationsWithRepeatable(annotationName, index); + } + + private void getAllKnownSubClasses(DotName className, Set allKnown, Set processedClasses) { + final Set subClassesToProcess = new HashSet(); + subClassesToProcess.add(className); + while (!subClassesToProcess.isEmpty()) { + final Iterator toProcess = subClassesToProcess.iterator(); + DotName name = toProcess.next(); + toProcess.remove(); + processedClasses.add(name); + getAllKnownSubClasses(name, allKnown, subClassesToProcess, processedClasses); + } + } + + private void getAllKnownSubClasses(DotName name, Set allKnown, Set subClassesToProcess, + Set processedClasses) { + final Collection directSubclasses = getKnownDirectSubclasses(name); + if (directSubclasses != null) { + for (final ClassInfo clazz : directSubclasses) { + final DotName className = clazz.name(); + if (!processedClasses.contains(className)) { + allKnown.add(clazz); + subClassesToProcess.add(className); + } + } + } + } + + private void getKnownImplementors(DotName name, Set allKnown, Set subInterfacesToProcess, + Set processedClasses) { + final Collection list = getKnownDirectImplementors(name); + if (list != null) { + for (final ClassInfo clazz : list) { + final DotName className = clazz.name(); + if (!processedClasses.contains(className)) { + if (Modifier.isInterface(clazz.flags())) { + subInterfacesToProcess.add(className); + } else { + if (!allKnown.contains(clazz)) { + allKnown.add(clazz); + processedClasses.add(className); + getAllKnownSubClasses(className, allKnown, processedClasses); + } + } + } + } + } + } + + private Optional computeAdditional(DotName className) { + LOGGER.debugf("Index: %s", className); + Indexer indexer = new Indexer(); + if (index(indexer, className.toString(), deploymentClassLoader)) { + Index index = indexer.complete(); + return Optional.of(index.getClassByName(className)); + } else { + // Note that ConcurrentHashMap does not allow null to be used as a value + return Optional.empty(); + } + } + + static boolean index(Indexer indexer, String className, ClassLoader classLoader) { + boolean result = false; + try (InputStream stream = classLoader + .getResourceAsStream(className.replace('.', '/') + ".class")) { + if (stream != null) { + 
indexer.index(stream); + result = true; + } else { + LOGGER.warnf("Failed to index %s: Class does not exist in ClassLoader %s", className, classLoader); + } + } catch (IOException e) { + LOGGER.warnf(e, "Failed to index %s: %s", className, e.getMessage()); + } + return result; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexingUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexingUtil.java index 832382496ef45..c092032e2ff7d 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexingUtil.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexingUtil.java @@ -1,12 +1,20 @@ package io.quarkus.deployment.index; import java.io.ByteArrayInputStream; +import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Path; +import java.util.Enumeration; import java.util.Set; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; +import java.util.zip.ZipEntry; import org.jboss.jandex.ClassInfo; import org.jboss.jandex.DotName; +import org.jboss.jandex.Index; +import org.jboss.jandex.IndexReader; import org.jboss.jandex.IndexView; import org.jboss.jandex.Indexer; import org.jboss.logging.Logger; @@ -19,6 +27,49 @@ public class IndexingUtil { public static final DotName OBJECT = DotName.createSimple(Object.class.getName()); + public static final String JANDEX_INDEX = "META-INF/jandex.idx"; + + // At least Jandex 2.1 is needed + private static final int REQUIRED_INDEX_VERSION = 8; + + public static Index indexJar(Path path) throws IOException { + return indexJar(path.toFile()); + } + + public static Index indexJar(File file) throws IOException { + try (JarFile jarFile = new JarFile(file)) { + ZipEntry existing = jarFile.getEntry(JANDEX_INDEX); + if (existing != null) { + try (InputStream in = jarFile.getInputStream(existing)) { + IndexReader reader = new IndexReader(in); + if (reader.getIndexVersion() < REQUIRED_INDEX_VERSION) { + log.warnf( + "Re-indexing %s - at least Jandex 2.1 must be used to index an application dependency", + file); + return indexJar(jarFile); + } else { + return reader.read(); + } + } + } + return indexJar(jarFile); + } + } + + private static Index indexJar(JarFile file) throws IOException { + Indexer indexer = new Indexer(); + Enumeration e = file.entries(); + while (e.hasMoreElements()) { + JarEntry entry = e.nextElement(); + if (entry.getName().endsWith(".class")) { + try (InputStream inputStream = file.getInputStream(entry)) { + indexer.index(inputStream); + } + } + } + return indexer.complete(); + } + public static void indexClass(String beanClass, Indexer indexer, IndexView quarkusIndex, Set additionalIndex, ClassLoader classLoader) { DotName beanClassName = DotName.createSimple(beanClass); @@ -31,7 +82,7 @@ public static void indexClass(String beanClass, Indexer indexer, IndexView quark try (InputStream stream = IoUtil.readClass(classLoader, beanClass)) { beanInfo = indexer.index(stream); additionalIndex.add(beanInfo.name()); - } catch (IOException e) { + } catch (Exception e) { throw new IllegalStateException("Failed to index: " + beanClass, e); } } else { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/PersistentClassIndex.java b/core/deployment/src/main/java/io/quarkus/deployment/index/PersistentClassIndex.java new file mode 100644 index 0000000000000..d7097699a9222 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/PersistentClassIndex.java @@ -0,0 +1,17 @@ +package 
io.quarkus.deployment.index; + +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; + +public class PersistentClassIndex { + + final Map> additionalClasses = new ConcurrentHashMap<>(); + + public Map> getAdditionalClasses() { + return additionalClasses; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/jbang/JBangAugmentorImpl.java b/core/deployment/src/main/java/io/quarkus/deployment/jbang/JBangAugmentorImpl.java new file mode 100644 index 0000000000000..5f3632e14a8d6 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/jbang/JBangAugmentorImpl.java @@ -0,0 +1,161 @@ +package io.quarkus.deployment.jbang; + +import java.io.File; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +import io.quarkus.bootstrap.BootstrapGradleException; +import io.quarkus.bootstrap.app.AdditionalDependency; +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; +import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; +import io.quarkus.bootstrap.resolver.model.WorkspaceModule; +import io.quarkus.bootstrap.util.QuarkusModelHelper; +import io.quarkus.builder.BuildChainBuilder; +import io.quarkus.builder.BuildResult; +import io.quarkus.builder.BuildStepBuilder; +import io.quarkus.deployment.QuarkusAugmentor; +import io.quarkus.deployment.builditem.ApplicationClassNameBuildItem; +import io.quarkus.deployment.builditem.GeneratedClassBuildItem; +import io.quarkus.deployment.builditem.GeneratedResourceBuildItem; +import io.quarkus.deployment.builditem.LiveReloadBuildItem; +import io.quarkus.deployment.builditem.MainClassBuildItem; +import io.quarkus.deployment.builditem.TransformedClassesBuildItem; +import io.quarkus.deployment.dev.DevModeContext; +import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; +import io.quarkus.deployment.pkg.builditem.NativeImageBuildItem; +import io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled; +import io.quarkus.runtime.LaunchMode; + +public class JBangAugmentorImpl implements BiConsumer> { + + @Override + public void accept(CuratedApplication curatedApplication, Map resultMap) { + + QuarkusClassLoader classLoader = curatedApplication.getAugmentClassLoader(); + + QuarkusBootstrap quarkusBootstrap = curatedApplication.getQuarkusBootstrap(); + QuarkusAugmentor.Builder builder = QuarkusAugmentor.builder() + .setRoot(quarkusBootstrap.getApplicationRoot()) + .setClassLoader(classLoader) + .addFinal(ApplicationClassNameBuildItem.class) + .setTargetDir(quarkusBootstrap.getTargetDirectory()) + .setDeploymentClassLoader(curatedApplication.createDeploymentClassLoader()) + .setBuildSystemProperties(quarkusBootstrap.getBuildSystemProperties()) + .setEffectiveModel(curatedApplication.getAppModel()); + if (quarkusBootstrap.getBaseName() != null) { + builder.setBaseName(quarkusBootstrap.getBaseName()); + } + + builder.setLaunchMode(LaunchMode.NORMAL); + builder.setRebuild(quarkusBootstrap.isRebuild()); + builder.setLiveReloadState( + new LiveReloadBuildItem(false, Collections.emptySet(), new HashMap<>(), null)); + 
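PersistentClassIndex above caches Optional values because, as the comment in computeAdditional notes, ConcurrentHashMap cannot store null; Optional.empty() stands in for "known to be absent" so a failed lookup is not repeated on every scan. A generic sketch of that negative-caching pattern (names and the loading logic are illustrative):

```java
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

final class NegativeCacheExample {

    // ConcurrentHashMap rejects null values, so "known to be missing" is cached as Optional.empty()
    // and computeIfAbsent runs the expensive lookup at most once per key.
    private final Map<String, Optional<String>> cache = new ConcurrentHashMap<>();

    String lookup(String key) {
        return cache.computeIfAbsent(key, this::expensiveLoad).orElse(null);
    }

    private Optional<String> expensiveLoad(String key) {
        // stand-in for indexing a class that may or may not exist on the class loader
        return key.startsWith("java.") ? Optional.of(key.toUpperCase()) : Optional.empty();
    }
}
```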
for (AdditionalDependency i : quarkusBootstrap.getAdditionalApplicationArchives()) { + //this gets added to the class path either way + //but we only need to add it to the additional app archives + //if it is forced as an app archive + if (i.isForceApplicationArchive()) { + builder.addAdditionalApplicationArchive(i.getArchivePath()); + } + } + builder.addBuildChainCustomizer(new Consumer() { + @Override + public void accept(BuildChainBuilder builder) { + final BuildStepBuilder stepBuilder = builder.addBuildStep((ctx) -> { + ctx.produce(new ProcessInheritIODisabled()); + }); + stepBuilder.produces(ProcessInheritIODisabled.class).build(); + } + }); + builder.excludeFromIndexing(quarkusBootstrap.getExcludeFromClassPath()); + builder.addFinal(GeneratedClassBuildItem.class); + builder.addFinal(MainClassBuildItem.class); + builder.addFinal(GeneratedResourceBuildItem.class); + builder.addFinal(TransformedClassesBuildItem.class); + boolean nativeRequested = "native".equals(System.getProperty("quarkus.package.type")); + boolean containerBuildRequested = Boolean.getBoolean("quarkus.container-image.build"); + if (nativeRequested) { + builder.addFinal(NativeImageBuildItem.class); + } + if (containerBuildRequested) { + //TODO: this is a bit ugly + //we don't nessesarily need these artifacts + //but if we include them it does mean that you can auto create docker images + //and deploy to kube etc + //for an ordinary build with no native and no docker this is a waste + builder.addFinal(ArtifactResultBuildItem.class); + } + + try { + BuildResult buildResult = builder.build().run(); + Map result = new HashMap<>(); + for (GeneratedClassBuildItem i : buildResult.consumeMulti(GeneratedClassBuildItem.class)) { + result.put(i.getName().replace(".", "/") + ".class", i.getClassData()); + } + for (GeneratedResourceBuildItem i : buildResult.consumeMulti(GeneratedResourceBuildItem.class)) { + result.put(i.getName(), i.getClassData()); + } + for (Map.Entry> entry : buildResult + .consume(TransformedClassesBuildItem.class).getTransformedClassesByJar().entrySet()) { + for (TransformedClassesBuildItem.TransformedClass transformed : entry.getValue()) { + result.put(transformed.getFileName(), transformed.getData()); + } + } + resultMap.put("files", result); + List javaargs = new ArrayList(); + javaargs.add("-Djava.util.logging.manager=org.jboss.logmanager.LogManager"); + resultMap.put("java-args", javaargs); + resultMap.put("main-class", buildResult.consume(MainClassBuildItem.class).getClassName()); + if (nativeRequested) { + resultMap.put("native-image", buildResult.consume(NativeImageBuildItem.class).getPath()); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private DevModeContext.ModuleInfo toModule(WorkspaceModule module) throws BootstrapGradleException { + AppArtifactKey key = new AppArtifactKey(module.getArtifactCoords().getGroupId(), + module.getArtifactCoords().getArtifactId(), module.getArtifactCoords().getClassifier()); + + Set sourceDirectories = new HashSet<>(); + Set sourceParents = new HashSet<>(); + for (File srcDir : module.getSourceSourceSet().getSourceDirectories()) { + sourceDirectories.add(srcDir.getPath()); + sourceParents.add(srcDir.getParent()); + } + + return new DevModeContext.ModuleInfo(key, + module.getArtifactCoords().getArtifactId(), + module.getProjectRoot().getPath(), + sourceDirectories, + QuarkusModelHelper.getClassPath(module).toAbsolutePath().toString(), + module.getSourceSourceSet().getResourceDirectory().toString(), + 
module.getSourceSet().getResourceDirectory().getPath(), + sourceParents, + module.getBuildDir().toPath().resolve("generated-sources").toAbsolutePath().toString(), + module.getBuildDir().toString()); + } + + private DevModeContext.ModuleInfo toModule(LocalProject project) { + return new DevModeContext.ModuleInfo(project.getKey(), project.getArtifactId(), + project.getDir().toAbsolutePath().toString(), + Collections.singleton(project.getSourcesSourcesDir().toAbsolutePath().toString()), + project.getClassesDir().toAbsolutePath().toString(), + project.getResourcesSourcesDir().toAbsolutePath().toString(), + project.getSourcesDir().toString(), + project.getCodeGenOutputDir().toString(), + project.getOutputDir().toString()); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java index fed4d7e04b1b0..9e7a1c55553dc 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java @@ -2,20 +2,28 @@ import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; +import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.logging.Handler; +import java.util.logging.Level; import java.util.stream.Collectors; import org.jboss.logmanager.EmbeddedConfigurator; +import org.objectweb.asm.Opcodes; +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.bootstrap.logging.InitialConfigurator; +import io.quarkus.deployment.GeneratedClassGizmoAdaptor; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ConsoleFormatterBannerBuildItem; +import io.quarkus.deployment.builditem.GeneratedClassBuildItem; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; import io.quarkus.deployment.builditem.LogCategoryBuildItem; import io.quarkus.deployment.builditem.LogConsoleFormatBuildItem; import io.quarkus.deployment.builditem.LogHandlerBuildItem; @@ -25,12 +33,42 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageSystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; +import io.quarkus.deployment.metrics.MetricsCapabilityBuildItem; +import io.quarkus.deployment.metrics.MetricsFactoryConsumerBuildItem; +import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; +import io.quarkus.gizmo.AnnotationCreator; +import io.quarkus.gizmo.BranchResult; +import io.quarkus.gizmo.BytecodeCreator; +import io.quarkus.gizmo.ClassCreator; +import io.quarkus.gizmo.ClassOutput; +import io.quarkus.gizmo.FieldCreator; +import io.quarkus.gizmo.FieldDescriptor; +import io.quarkus.gizmo.MethodCreator; +import io.quarkus.gizmo.MethodDescriptor; +import io.quarkus.gizmo.ResultHandle; +import io.quarkus.runtime.LaunchMode; import io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.configuration.ConfigInstantiator; +import io.quarkus.runtime.logging.CategoryBuildTimeConfig; +import io.quarkus.runtime.logging.CleanupFilterConfig; +import io.quarkus.runtime.logging.InheritableLevel; +import 
io.quarkus.runtime.logging.LogBuildTimeConfig; +import io.quarkus.runtime.logging.LogCleanupFilterElement; import io.quarkus.runtime.logging.LogConfig; +import io.quarkus.runtime.logging.LogMetricsHandlerRecorder; import io.quarkus.runtime.logging.LoggingSetupRecorder; public final class LoggingResourceProcessor { + private static final String LOGMANAGER_LOGGER_CLASS_NAME = "io.quarkus.runtime.generated.Target_org_jboss_logmanager_Logger"; + private static final String LOGGING_LOGGER_CLASS_NAME = "io.quarkus.runtime.generated.Target_org_jboss_logging_Logger"; + private static final String LOGGER_NODE_CLASS_NAME = "io.quarkus.runtime.generated.Target_org_jboss_logmanager_LoggerNode"; + + private static final String MIN_LEVEL_COMPUTE_CLASS_NAME = "io.quarkus.runtime.generated.MinLevelCompute"; + private static final MethodDescriptor IS_MIN_LEVEL_ENABLED = MethodDescriptor.ofMethod(MIN_LEVEL_COMPUTE_CLASS_NAME, + "isMinLevelEnabled", + boolean.class, int.class, String.class); + @BuildStep void setupLogFilters(BuildProducer filters) { filters.produce(new LogCleanupFilterBuildItem("org.jboss.threads", "JBoss Threads version")); @@ -89,10 +127,12 @@ void miscSetup( @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - LoggingSetupBuildItem setupLoggingRuntimeInit(LoggingSetupRecorder recorder, LogConfig log, + LoggingSetupBuildItem setupLoggingRuntimeInit(LoggingSetupRecorder recorder, LogConfig log, LogBuildTimeConfig buildLog, List handlerBuildItems, List namedHandlerBuildItems, List consoleFormatItems, - Optional possibleBannerBuildItem) { + Optional possibleBannerBuildItem, + LaunchModeBuildItem launchModeBuildItem, + List logCleanupFilters) { final List>> handlers = handlerBuildItems.stream() .map(LogHandlerBuildItem::getHandlerValue) .collect(Collectors.toList()); @@ -107,9 +147,29 @@ LoggingSetupBuildItem setupLoggingRuntimeInit(LoggingSetupRecorder recorder, Log if (bannerBuildItem != null) { possibleSupplier = bannerBuildItem.getBannerSupplier(); } - recorder.initializeLogging(log, handlers, namedHandlers, + recorder.initializeLogging(log, buildLog, handlers, namedHandlers, consoleFormatItems.stream().map(LogConsoleFormatBuildItem::getFormatterValue).collect(Collectors.toList()), possibleSupplier); + if (launchModeBuildItem.getLaunchMode() != LaunchMode.NORMAL) { + LogConfig logConfig = new LogConfig(); + ConfigInstantiator.handleObject(logConfig); + for (LogCleanupFilterBuildItem i : logCleanupFilters) { + CleanupFilterConfig value = new CleanupFilterConfig(); + LogCleanupFilterElement filterElement = i.getFilterElement(); + value.ifStartsWith = filterElement.getMessageStarts(); + value.targetLevel = filterElement.getTargetLevel() == null ? 
org.jboss.logmanager.Level.DEBUG + : filterElement.getTargetLevel(); + logConfig.filters.put(filterElement.getLoggerName(), value); + } + LoggingSetupRecorder.initializeBuildTimeLogging(logConfig, buildLog); + ((QuarkusClassLoader) Thread.currentThread().getContextClassLoader()).addCloseTask(new Runnable() { + @Override + public void run() { + InitialConfigurator.DELAYED_HANDLER.buildTimeComplete(); + } + }); + } + return new LoggingSetupBuildItem(); } @@ -124,4 +184,219 @@ void setupLoggingStaticInit(LoggingSetupRecorder recorder) { void setUpDarkeningDefault(Consumer rtcConsumer) { rtcConsumer.accept(new RunTimeConfigurationDefaultBuildItem("quarkus.log.console.darken", "0")); } + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void registerMetrics(LogMetricsHandlerRecorder recorder, LogBuildTimeConfig log, + BuildProducer metrics, + BuildProducer logHandler, Optional metricsCapability) { + if (metricsCapability.isPresent() && log.metricsEnabled) { + recorder.initCounters(); + metrics.produce(new MetricsFactoryConsumerBuildItem(recorder.registerMetrics())); + logHandler.produce(new LogHandlerBuildItem(recorder.getLogHandler())); + } + } + + @BuildStep(onlyIf = NativeOrNativeSourcesBuild.class) + void setUpMinLevelLogging(LogBuildTimeConfig log, + final BuildProducer generatedTraceLogger) { + ClassOutput output = new GeneratedClassGizmoAdaptor(generatedTraceLogger, false); + if (log.categories.isEmpty() || allMinLevelInfoOrHigher(log.minLevel.intValue(), log.categories)) { + generateDefaultLoggers(log.minLevel, output); + } else { + generateCategoryMinLevelLoggers(log.categories, log.minLevel, output); + } + } + + private static boolean allMinLevelInfoOrHigher(int minLogLevel, Map categories) { + return categories.values().stream() + .allMatch(categoryConfig -> categoryConfig.minLevel.getLevel().intValue() >= minLogLevel); + } + + private static void generateDefaultLoggers(Level minLevel, ClassOutput output) { + generateDefaultLoggingLogger(minLevel, output); + generateDefaultLoggerNode(output); + generateLogManagerLogger(output, LoggingResourceProcessor.generateMinLevelDefault(minLevel.getName())); + } + + private static void generateCategoryMinLevelLoggers(Map categories, Level rootMinLevel, + ClassOutput output) { + generateMinLevelCompute(categories, rootMinLevel, output); + generateDefaultLoggerNode(output); + generateLogManagerLogger(output, LoggingResourceProcessor::generateMinLevelCheckCategory); + } + + private static BranchResult generateMinLevelCheckCategory(MethodCreator method, FieldDescriptor nameAliasDescriptor) { + final ResultHandle levelIntValue = getParamLevelIntValue(method); + final ResultHandle nameAlias = method.readInstanceField(nameAliasDescriptor, method.getThis()); + return method.ifTrue(method.invokeStaticMethod(IS_MIN_LEVEL_ENABLED, levelIntValue, nameAlias)); + } + + private static void generateMinLevelCompute(Map categories, Level rootMinLevel, + ClassOutput output) { + try (ClassCreator cc = ClassCreator.builder().setFinal(true) + .className(MIN_LEVEL_COMPUTE_CLASS_NAME) + .classOutput(output).build()) { + + try (MethodCreator mc = cc.getMethodCreator(IS_MIN_LEVEL_ENABLED)) { + mc.setModifiers(Opcodes.ACC_STATIC); + + final ResultHandle level = mc.getMethodParam(0); + final ResultHandle name = mc.getMethodParam(1); + + BytecodeCreator current = mc; + for (Map.Entry entry : categories.entrySet()) { + final String category = entry.getKey(); + final int categoryLevelIntValue = getLogMinLevel(entry.getKey(), entry.getValue(), categories, rootMinLevel) + 
.intValue(); + + ResultHandle equalsResult = current.invokeVirtualMethod( + MethodDescriptor.ofMethod(Object.class, "equals", boolean.class, Object.class), + name, current.load(category)); + + BranchResult equalsBranch = current.ifTrue(equalsResult); + try (BytecodeCreator false1 = equalsBranch.falseBranch()) { + ResultHandle startsWithResult = false1.invokeVirtualMethod( + MethodDescriptor.ofMethod(String.class, "startsWith", boolean.class, String.class), + name, false1.load(category + ".")); + + BranchResult startsWithBranch = false1.ifTrue(startsWithResult); + + final BytecodeCreator startsWithTrue = startsWithBranch.trueBranch(); + final BranchResult levelCompareBranch = startsWithTrue.ifIntegerGreaterEqual(level, + startsWithTrue.load(categoryLevelIntValue)); + levelCompareBranch.trueBranch().returnValue(levelCompareBranch.trueBranch().load(true)); + levelCompareBranch.falseBranch().returnValue(levelCompareBranch.falseBranch().load(false)); + + current = startsWithBranch.falseBranch(); + } + + equalsBranch.trueBranch().returnValue(equalsBranch.trueBranch().load(true)); + } + + final ResultHandle infoLevelIntValue = getLogManagerLevelIntValue(rootMinLevel.toString(), current); + final BranchResult isInfoOrHigherBranch = current.ifIntegerGreaterEqual(level, infoLevelIntValue); + isInfoOrHigherBranch.trueBranch().returnValue(isInfoOrHigherBranch.trueBranch().load(true)); + isInfoOrHigherBranch.falseBranch().returnValue(isInfoOrHigherBranch.falseBranch().load(false)); + } + } + } + + private static Level getLogMinLevel(String categoryName, CategoryBuildTimeConfig categoryConfig, + Map categories, + Level rootMinLevel) { + if (Objects.isNull(categoryConfig)) + return rootMinLevel; + + final InheritableLevel inheritableLevel = categoryConfig.minLevel; + if (!inheritableLevel.isInherited()) + return inheritableLevel.getLevel(); + + int lastDotIndex = categoryName.lastIndexOf('.'); + if (lastDotIndex == -1) + return rootMinLevel; + + String parent = categoryName.substring(0, lastDotIndex); + return getLogMinLevel(parent, categories.get(parent), categories, rootMinLevel); + } + + private static void generateDefaultLoggerNode(ClassOutput output) { + try (ClassCreator cc = ClassCreator.builder().setFinal(true) + .className(LOGGER_NODE_CLASS_NAME) + .classOutput(output).build()) { + + AnnotationCreator targetClass = cc.addAnnotation("com.oracle.svm.core.annotate.TargetClass"); + targetClass.addValue("className", "org.jboss.logmanager.LoggerNode"); + + final MethodCreator isLoggableLevelMethod = cc.getMethodCreator("isLoggableLevel", boolean.class, int.class); + isLoggableLevelMethod.addAnnotation("com.oracle.svm.core.annotate.Alias"); + isLoggableLevelMethod.returnValue(isLoggableLevelMethod.load(false)); + } + } + + private static void generateLogManagerLogger(ClassOutput output, + BiFunction isMinLevelEnabledFunction) { + try (ClassCreator cc = ClassCreator.builder().setFinal(true) + .className(LOGMANAGER_LOGGER_CLASS_NAME) + .classOutput(output).build()) { + + AnnotationCreator targetClass = cc.addAnnotation("com.oracle.svm.core.annotate.TargetClass"); + targetClass.addValue("className", "org.jboss.logmanager.Logger"); + + FieldCreator nameAlias = cc.getFieldCreator("name", String.class); + nameAlias.addAnnotation("com.oracle.svm.core.annotate.Alias"); + + FieldCreator loggerNodeAlias = cc.getFieldCreator("loggerNode", LOGGER_NODE_CLASS_NAME); + loggerNodeAlias.addAnnotation("com.oracle.svm.core.annotate.Alias"); + + final MethodCreator isLoggableMethod = cc.getMethodCreator("isLoggable", 
boolean.class, + java.util.logging.Level.class); + isLoggableMethod.addAnnotation("com.oracle.svm.core.annotate.Substitute"); + + final ResultHandle levelIntValue = getParamLevelIntValue(isLoggableMethod); + + final BranchResult levelBranch = isMinLevelEnabledFunction.apply(isLoggableMethod, nameAlias.getFieldDescriptor()); + + final BytecodeCreator levelTrue = levelBranch.trueBranch(); + levelTrue.returnValue( + levelTrue.invokeVirtualMethod( + MethodDescriptor.ofMethod(LOGGER_NODE_CLASS_NAME, "isLoggableLevel", boolean.class, int.class), + levelTrue.readInstanceField(loggerNodeAlias.getFieldDescriptor(), levelTrue.getThis()), + levelIntValue)); + + final BytecodeCreator levelFalse = levelBranch.falseBranch(); + levelFalse.returnValue(levelFalse.load(false)); + } + } + + private static ResultHandle getParamLevelIntValue(MethodCreator method) { + final ResultHandle level = method.getMethodParam(0); + return method + .invokeVirtualMethod(MethodDescriptor.ofMethod(Level.class, "intValue", int.class), level); + } + + private static BiFunction generateMinLevelDefault( + String defaultMinLevelName) { + return (method, nameAliasDescriptor) -> { + final ResultHandle levelIntValue = getParamLevelIntValue(method); + final ResultHandle infoLevelIntValue = getLogManagerLevelIntValue(defaultMinLevelName, method); + return method.ifIntegerGreaterEqual(levelIntValue, infoLevelIntValue); + }; + } + + private static ResultHandle getLogManagerLevelIntValue(String levelName, BytecodeCreator method) { + final ResultHandle infoLevel = method.readStaticField( + FieldDescriptor.of(org.jboss.logmanager.Level.class, levelName, org.jboss.logmanager.Level.class)); + return method + .invokeVirtualMethod(MethodDescriptor.ofMethod(Level.class, "intValue", int.class), infoLevel); + } + + private static void generateDefaultLoggingLogger(Level minLevel, ClassOutput output) { + try (ClassCreator cc = ClassCreator.builder().setFinal(true) + .className(LOGGING_LOGGER_CLASS_NAME) + .classOutput(output).build()) { + + AnnotationCreator targetClass = cc.addAnnotation("com.oracle.svm.core.annotate.TargetClass"); + targetClass.addValue("className", "org.jboss.logging.Logger"); + + if (minLevel.intValue() >= org.jboss.logmanager.Level.INFO.intValue()) { + // Constant fold these methods to return false, + // since the build time log level is above this level. + generateFalseFoldMethod("isTraceEnabled", cc); + generateFalseFoldMethod("isDebugEnabled", cc); + } else if (minLevel.intValue() == org.jboss.logmanager.Level.DEBUG.intValue()) { + generateFalseFoldMethod("isTraceEnabled", cc); + } + } + } + + /** + * Generates a method that is constant-folded to always return false. 
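The Gizmo calls above emit these classes as bytecode at build time, which makes them hard to read in diff form. Written out as ordinary Java source they are roughly equivalent to the sketches below (names and levels are examples; the real output depends on the configured categories and minimum level). First, the substitution produced by generateDefaultLoggingLogger(...) when the build-time minimum level is INFO or above:

```java
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;

import org.graalvm.compiler.api.replacements.Fold;

// Approximate source form of the generated substitution class; the generated name differs.
@TargetClass(className = "org.jboss.logging.Logger")
final class Target_org_jboss_logging_Logger_Sketch {

    // Constant-folded to false so trace/debug call sites can be eliminated from the native image.
    @Substitute
    @Fold
    public boolean isTraceEnabled() {
        return false;
    }

    @Substitute
    @Fold
    public boolean isDebugEnabled() {
        return false;
    }
}
```

And the category-aware check produced by generateMinLevelCompute(...), assuming a single category org.acme configured with min-level DEBUG and a root minimum level of INFO:

```java
// Approximate source form of the generated MinLevelCompute class.
final class MinLevelCompute_Sketch {

    static boolean isMinLevelEnabled(int level, String name) {
        // one block like this is emitted per configured category
        if (name.equals("org.acme")) {
            return true;
        }
        if (name.startsWith("org.acme.")) {
            return level >= org.jboss.logmanager.Level.DEBUG.intValue();
        }
        // fallback: compare against the root minimum level
        return level >= org.jboss.logmanager.Level.INFO.intValue();
    }
}
```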
+ */ + private static void generateFalseFoldMethod(String name, ClassCreator cc) { + MethodCreator method = cc.getMethodCreator(name, boolean.class); + method.addAnnotation("com.oracle.svm.core.annotate.Substitute"); + method.addAnnotation("org.graalvm.compiler.api.replacements.Fold"); + method.returnValue(method.load(false)); + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsCapabilityBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsCapabilityBuildItem.java new file mode 100644 index 0000000000000..c4f9fd9a183ae --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsCapabilityBuildItem.java @@ -0,0 +1,43 @@ +package io.quarkus.deployment.metrics; + +import io.quarkus.builder.item.SimpleBuildItem; + +public final class MetricsCapabilityBuildItem extends SimpleBuildItem { + @FunctionalInterface + public interface MetricsCapability { + boolean isSupported(String value); + } + + final String path; + final MetricsCapability metricsCapability; + + public MetricsCapabilityBuildItem(MetricsCapability metricsCapability) { + this(metricsCapability, null); + } + + public MetricsCapabilityBuildItem(MetricsCapability metricsCapability, String path) { + this.metricsCapability = metricsCapability; + this.path = path; + } + + /** + * Test for a known metrics system to allow selective initialization of metrics + * based using a known API. Avoid using deployment module artifacts. Ensure that + * metrics API dependencies remain optional / compile-time only. + * + * @return true if this factory supports the named metrics system. Arbitrary + * strings are allowed. Constants are present for a few. + * @see io.quarkus.runtime.metrics.MetricsFactory#MICROMETER + * @see io.quarkus.runtime.metrics.MetricsFactory#MP_METRICS + */ + public boolean metricsSupported(String name) { + return metricsCapability.isSupported(name); + } + + /** + * @return the configured Metrics Endpoint (if an endpoint is enabled) or null + */ + public String metricsEndpoint() { + return path; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsFactoryConsumerBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsFactoryConsumerBuildItem.java new file mode 100644 index 0000000000000..cc6bc1efb17bd --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/metrics/MetricsFactoryConsumerBuildItem.java @@ -0,0 +1,34 @@ +package io.quarkus.deployment.metrics; + +import java.util.function.Consumer; + +import io.quarkus.builder.item.MultiBuildItem; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.runtime.metrics.MetricsFactory; + +/** + * A metrics provider will iterate over all MetricsFactory consumers, + * allowing them to register metrics via bytecode recording + */ +public final class MetricsFactoryConsumerBuildItem extends MultiBuildItem { + private final Consumer factoryConsumer; + private final ExecutionTime executionTime; + + public MetricsFactoryConsumerBuildItem(Consumer factoryConsumer) { + this.factoryConsumer = factoryConsumer; + this.executionTime = ExecutionTime.RUNTIME_INIT; + } + + public MetricsFactoryConsumerBuildItem(Consumer factoryConsumer, ExecutionTime executionTime) { + this.factoryConsumer = factoryConsumer; + this.executionTime = executionTime; + } + + public Consumer getConsumer() { + return factoryConsumer; + } + + public ExecutionTime executionTime() { + return executionTime; + } +} diff --git 
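To make the intent of the two metrics build items above concrete, a hypothetical consuming extension could look like the sketch below; `MyMetricsRecorder` and `registerMyMetrics()` are invented for illustration, only the build item APIs shown above are real:

```java
import java.util.Optional;

import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.metrics.MetricsCapabilityBuildItem;
import io.quarkus.deployment.metrics.MetricsFactoryConsumerBuildItem;
import io.quarkus.runtime.metrics.MetricsFactory;

class MyExtensionMetricsProcessor {

    @BuildStep
    @Record(ExecutionTime.RUNTIME_INIT)
    void registerMetrics(Optional<MetricsCapabilityBuildItem> metricsCapability,
            MyMetricsRecorder recorder, // hypothetical @Recorder that returns a Consumer<MetricsFactory>
            BuildProducer<MetricsFactoryConsumerBuildItem> consumers) {
        // Only wire up metrics when a Micrometer-capable metrics extension is present.
        if (metricsCapability.isPresent()
                && metricsCapability.get().metricsSupported(MetricsFactory.MICROMETER)) {
            consumers.produce(new MetricsFactoryConsumerBuildItem(
                    recorder.registerMyMetrics(), ExecutionTime.RUNTIME_INIT));
        }
    }
}
```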
a/core/deployment/src/main/java/io/quarkus/deployment/mutability/DevModeTask.java b/core/deployment/src/main/java/io/quarkus/deployment/mutability/DevModeTask.java index 5dbdd223da32e..ee28f31a587bb 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/mutability/DevModeTask.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/mutability/DevModeTask.java @@ -56,7 +56,7 @@ public static Closeable main(Path appRoot) throws Exception { .setAppArtifact(existingModel.getAppArtifact()) .setExistingModel(existingModel) .setIsolateDeployment(true) - .setMode(QuarkusBootstrap.Mode.DEV) + .setMode(QuarkusBootstrap.Mode.REMOTE_DEV_SERVER) .setBuildSystemProperties(buildSystemProperties) .setBaseName(appModel.getBaseName()) .setApplicationRoot(existingModel.getAppArtifact().getPath()) @@ -129,8 +129,7 @@ public static void extractDevModeClasses(Path appRoot, AppModel appModel, PostEx //not all local projects are dependencies continue; } - IoUtils.recursiveDelete(moduleClasses); - Files.createDirectories(moduleClasses); + IoUtils.createOrEmptyDir(moduleClasses); for (Path p : dep.getPaths()) { if (Files.isDirectory(p)) { Path moduleTarget = moduleClasses; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java index d185bb3d26df7..548483363da16 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java @@ -8,6 +8,8 @@ import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.quarkus.runtime.annotations.ConvertWith; +import io.quarkus.runtime.configuration.TrimmedStringConverter; @ConfigRoot(phase = ConfigPhase.BUILD_TIME) public class NativeConfig { @@ -44,16 +46,40 @@ public class NativeConfig { public boolean enableJni; /** - * If all character sets should be added to the native image. This increases image size + * Defines the user language used for building the native executable. + *

+     * Defaults to the system one.
      */
-    @ConfigItem
-    public boolean addAllCharsets;
+    @ConfigItem(defaultValue = "${user.language:}")
+    @ConvertWith(TrimmedStringConverter.class)
+    public Optional<String> userLanguage;
+
+    /**
+     * Defines the user country used for building the native executable.
+     * <p>

+ * Defaults to the system one. + */ + @ConfigItem(defaultValue = "${user.country:}") + @ConvertWith(TrimmedStringConverter.class) + public Optional userCountry; + + /** + * Defines the file encoding as in -Dfile.encoding=... + * + * Native image runtime uses the host's (i.e. build time) value of file.encoding + * system property. We intentionally default this to UTF-8 to avoid platform specific + * defaults to be picked up which can then result in inconsistent behavior in the + * generated native executable. + */ + @ConfigItem(defaultValue = "UTF-8") + @ConvertWith(TrimmedStringConverter.class) + public String fileEncoding; /** - * If all time zones should be added to the native image. This increases image size + * If all character sets should be added to the native image. This increases image size */ @ConfigItem - public boolean includeAllTimeZones; + public boolean addAllCharsets; /** * The location of the Graal distribution @@ -131,10 +157,16 @@ public class NativeConfig { @ConfigItem public boolean containerBuild; + /** + * If this build is done using a remote docker daemon. + */ + @ConfigItem + public boolean remoteContainerBuild; + /** * The docker image to use to do the image build */ - @ConfigItem(defaultValue = "quay.io/quarkus/ubi-quarkus-native-image:20.1.0-java11") + @ConfigItem(defaultValue = "${platform.quarkus.native.builder-image}") public String builderImage; /** @@ -142,7 +174,7 @@ public class NativeConfig { * a container build is always done. */ @ConfigItem - public Optional containerRuntime; + public Optional containerRuntime; /** * Options to pass to the container runtime @@ -269,6 +301,29 @@ public static class ResourcesConfig { @ConfigItem public Optional> includes; + /** + * A comma separated list of globs to match resource paths that should not be added to the native image. + *
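For orientation, the native-image options introduced above would typically be set in `application.properties` along these lines (a sketch: property names assume the usual kebab-case mapping of the config fields, and the values are placeholders):

```properties
# Locale and encoding baked into the native executable
quarkus.native.user-language=en
quarkus.native.user-country=US
quarkus.native.file-encoding=UTF-8

# Container-based builds
quarkus.native.container-build=true
quarkus.native.remote-container-build=false
quarkus.native.container-runtime=podman
```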

+         * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash.
+         * <p>
+         * Please refer to {@link #includes} for details about the glob syntax.
+         * <p>
+         * By default, no resources are excluded.
+         * <p>
+         * Example: Given that you have {@code src/main/resources/red.png}
+         * and {@code src/main/resources/foo/green.png} in your source tree and one of your dependency JARs contains
+         * {@code bar/blue.png} file, with the following configuration
+         *
+         * <pre>
+         * quarkus.native.resources.includes = **/*.png
+         * quarkus.native.resources.excludes = foo/**,**/green.png
+         * </pre>
+ * + * the resource {@code red.png} will be available in the native image while the resources {@code foo/green.png} + * and {@code bar/blue.png} will not be available in the native image. + */ + @ConfigItem + public Optional> excludes; } /** @@ -286,4 +341,22 @@ public static class Debug { @ConfigItem public boolean enabled; } + + /** + * Generate the report files for GraalVM Dashboard. + */ + @ConfigItem + public boolean enableDashboardDump; + + /** + * Supported Container runtimes + */ + public static enum ContainerRuntime { + DOCKER, + PODMAN; + + public String getExecutableName() { + return this.name().toLowerCase(); + } + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java index ce29c80900b3b..205c0be2ab3bf 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java @@ -3,6 +3,7 @@ import java.util.List; import java.util.Optional; +import io.quarkus.runtime.annotations.ConfigGroup; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; @@ -11,33 +12,27 @@ public class PackageConfig { public static final String JAR = "jar"; public static final String UBER_JAR = "uber-jar"; - /** - * This is the new packaging format, it is intended to become the default soonish, so it will just - * be referred to as 'jar'. - */ public static final String FAST_JAR = "fast-jar"; public static final String MUTABLE_JAR = "mutable-jar"; + /** + * @deprecated use 'legacy-jar' instead + */ + @Deprecated public static final String LEGACY = "legacy"; + public static final String LEGACY_JAR = "legacy-jar"; public static final String NATIVE = "native"; + // does everything 'native' but stops short of actually executing the 'native-image' command + public static final String NATIVE_SOURCES = "native-sources"; /** * The requested output type. - * - * The default built in types are 'jar', 'fast-jar' (a prototype more performant version of the default 'jar' type), - * 'uber-jar' and 'native'. + *

+ * The default built in types are 'jar' (which will use 'fast-jar'), 'legacy-jar' for the pre-1.12 default jar + * packaging, 'uber-jar', 'native' and 'native-sources'. */ @ConfigItem(defaultValue = JAR) public String type; - /** - * If the java runner should be packed as an uberjar - * - * This is deprecated, you should use quarkus.package.type=uber-jar instead - */ - @Deprecated - @ConfigItem(defaultValue = "false") - public boolean uberJar; - /** * Manifest configuration of the runner jar. */ @@ -47,7 +42,7 @@ public class PackageConfig { /** * The entry point of the application. This can either be a a fully qualified name of a standard Java * class with a main method, or {@link io.quarkus.runtime.QuarkusApplication}. - * + *

      * If your application has main classes annotated with {@link io.quarkus.runtime.annotations.QuarkusMain}
      * then this can also reference the name given in the annotation, to avoid the need to specify fully qualified
      * names in the config.
@@ -89,34 +84,86 @@ public class PackageConfig {
     @ConfigItem
     public boolean createAppcds;
 
+    /**
+     * When AppCDS generation is enabled, if this property is set, then the JVM used to generate the AppCDS file
+     * will be the JVM present in the container image. The builder image is expected to have the 'java' binary
+     * on its PATH.
+     * This flag is useful when the JVM to be used at runtime is not the same exact JVM version as the one used to build
+     * the jar.
+     * Note that this property is consulted only when {@code quarkus.package.create-appcds=true} and it requires having
+     * docker available during the build.
+     */
+    @ConfigItem
+    public Optional<String> appcdsBuilderImage;
+
     /**
      * This is an advanced option that only takes effect for the mutable-jar format.
-     *
+     * <p>

* If this is specified a directory of this name will be created in the jar distribution. Users can place * jar files in this directory, and when re-augmentation is performed these will be processed and added to the * class-path. - * + *

* Note that before reaugmentation has been performed these jars will be ignored, and if they are updated the app * should be reaugmented again. */ @ConfigItem public Optional userProvidersDirectory; + /** + * This option only applies when using fast-jar or mutable-jar. If this option is true + * then a list of all the coordinates of the artifacts that made up this image will be included + * in the quarkus-app directory. This list can be used by vulnerability scanners to determine + * if your application has any vulnerable dependencies. + */ + @ConfigItem(defaultValue = "true") + public boolean includeDependencyList; + + /** + * Fernflower Decompiler configuration + */ + @ConfigItem + public FernflowerConfig fernflower; + public boolean isAnyJarType() { - return (type.equalsIgnoreCase(PackageConfig.LEGACY) || - type.equalsIgnoreCase(PackageConfig.JAR) || + return (type.equalsIgnoreCase(PackageConfig.JAR) || type.equalsIgnoreCase(PackageConfig.FAST_JAR) || type.equalsIgnoreCase(PackageConfig.UBER_JAR)) || + type.equalsIgnoreCase(PackageConfig.LEGACY_JAR) || + type.equalsIgnoreCase(PackageConfig.LEGACY) || type.equalsIgnoreCase(PackageConfig.MUTABLE_JAR); } public boolean isFastJar() { - return type.equalsIgnoreCase(PackageConfig.FAST_JAR) || + return type.equalsIgnoreCase(PackageConfig.JAR) || + type.equalsIgnoreCase(PackageConfig.FAST_JAR) || type.equalsIgnoreCase(PackageConfig.MUTABLE_JAR); } public boolean isLegacyJar() { - return (type.equalsIgnoreCase(PackageConfig.LEGACY) || - type.equalsIgnoreCase(PackageConfig.JAR)); + return (type.equalsIgnoreCase(PackageConfig.LEGACY_JAR) || + type.equalsIgnoreCase(PackageConfig.LEGACY)); + } + + @ConfigGroup + public static class FernflowerConfig { + + /** + * An advanced option that will decompile generated and transformed bytecode into the 'decompiled' directory. + * This is only taken into account when fast-jar is used. 
+ */ + @ConfigItem(defaultValue = "false") + public boolean enabled; + + /** + * The git hash to use to download the fernflower tool from https://jitpack.io/com/github/fesh0r/fernflower/ + */ + @ConfigItem(defaultValue = "dbf407a655") + public String hash; + + /** + * The directory into which to save the fernflower tool if it doesn't exist + */ + @ConfigItem(defaultValue = "${user.home}/.quarkus") + public String jarDirectory; } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/AppCDSContainerImageBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/AppCDSContainerImageBuildItem.java new file mode 100644 index 0000000000000..45f5e6aff633b --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/AppCDSContainerImageBuildItem.java @@ -0,0 +1,19 @@ +package io.quarkus.deployment.pkg.builditem; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * Indicates that a specific container image should be used to generate the AppCDS file + */ +public final class AppCDSContainerImageBuildItem extends SimpleBuildItem { + + private final String containerImage; + + public AppCDSContainerImageBuildItem(String containerImage) { + this.containerImage = containerImage; + } + + public String getContainerImage() { + return containerImage; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/ArtifactResultBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/ArtifactResultBuildItem.java index 7b23a39582d65..031abef44544b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/ArtifactResultBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/ArtifactResultBuildItem.java @@ -16,12 +16,12 @@ public final class ArtifactResultBuildItem extends MultiBuildItem { private final Path path; private final String type; - private final Map additionalPaths; + private final Map metadata; - public ArtifactResultBuildItem(Path path, String type, Map additionalPaths) { + public ArtifactResultBuildItem(Path path, String type, Map metadata) { this.path = path; this.type = type; - this.additionalPaths = additionalPaths; + this.metadata = metadata; } public Path getPath() { @@ -32,7 +32,7 @@ public String getType() { return type; } - public Map getAdditionalPaths() { - return additionalPaths; + public Map getMetadata() { + return metadata; } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java index 2a27f9d42c075..5db2a885bee75 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java @@ -11,12 +11,14 @@ public final class JarBuildItem extends SimpleBuildItem { private final Path originalArtifact; private final Path libraryDir; private final String type; + private final String classifier; - public JarBuildItem(Path path, Path originalArtifact, Path libraryDir, String type) { + public JarBuildItem(Path path, Path originalArtifact, Path libraryDir, String type, String classifier) { this.path = path; this.originalArtifact = originalArtifact; this.libraryDir = libraryDir; this.type = type; + this.classifier = classifier; } public boolean isUberJar() { @@ -39,7 +41,11 @@ public String getType() { return type; } + public String getClassifier() { + return classifier; + } 
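For orientation, the packaging options added to PackageConfig above map to `application.properties` entries along these lines (a sketch: names assume the usual kebab-case mapping of the config fields, and the builder image value is a placeholder):

```properties
# One of: jar (now an alias for fast-jar), legacy-jar, uber-jar, mutable-jar, native, native-sources
quarkus.package.type=jar

# Written into quarkus-app so vulnerability scanners can inspect the dependency list (on by default)
quarkus.package.include-dependency-list=true

# Generate an AppCDS archive using the JVM from a builder image (placeholder image reference)
quarkus.package.create-appcds=true
quarkus.package.appcds-builder-image=registry.example.com/openjdk-11-builder

# Decompile generated and transformed bytecode into the 'decompiled' directory (fast-jar only)
quarkus.package.fernflower.enabled=true
```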
+ public JarResult toJarResult() { - return new JarResult(path, originalArtifact, libraryDir, type); + return new JarResult(path, originalArtifact, libraryDir, type, classifier); } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/LegacyJarRequiredBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/LegacyJarRequiredBuildItem.java new file mode 100644 index 0000000000000..7a337c2e7194b --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/LegacyJarRequiredBuildItem.java @@ -0,0 +1,9 @@ +package io.quarkus.deployment.pkg.builditem; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * marker build item that extensions can use to force legacy jar creation + */ +public final class LegacyJarRequiredBuildItem extends MultiBuildItem { +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java index 1198ea1da1592..05daa7f5fdb25 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java @@ -1,10 +1,11 @@ package io.quarkus.deployment.pkg.steps; +import static io.quarkus.deployment.pkg.steps.LinuxIDUtil.getLinuxID; + import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -18,105 +19,137 @@ import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.pkg.PackageConfig; +import io.quarkus.deployment.pkg.builditem.AppCDSContainerImageBuildItem; import io.quarkus.deployment.pkg.builditem.AppCDSRequestedBuildItem; import io.quarkus.deployment.pkg.builditem.AppCDSResultBuildItem; import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; import io.quarkus.deployment.pkg.builditem.JarBuildItem; import io.quarkus.deployment.pkg.builditem.OutputTargetBuildItem; import io.quarkus.deployment.steps.MainClassBuildStep; -import io.quarkus.deployment.util.JavaVersionUtil; import io.quarkus.runtime.LaunchMode; +import io.quarkus.runtime.util.JavaVersionUtil; import io.quarkus.utilities.JavaBinFinder; public class AppCDSBuildStep { private static final Logger log = Logger.getLogger(AppCDSBuildStep.class); - public static final String JDK_CLASSLIST_FILE = "classlist"; + public static final String CLASSES_LIST_FILE_NAME = "classes.lst"; + private static final String CONTAINER_IMAGE_BASE_BUILD_DIR = "/tmp/quarkus"; + private static final String CONTAINER_IMAGE_APPCDS_DIR = CONTAINER_IMAGE_BASE_BUILD_DIR + "/appcds"; @BuildStep(onlyIf = AppCDSRequired.class) public void requested(OutputTargetBuildItem outputTarget, BuildProducer producer) throws IOException { Path appCDSDir = outputTarget.getOutputDirectory().resolve("appcds"); - IoUtils.recursiveDelete(appCDSDir); - Files.createDirectories(appCDSDir); + IoUtils.createOrEmptyDir(appCDSDir); producer.produce(new AppCDSRequestedBuildItem(outputTarget.getOutputDirectory().resolve("appcds"))); } @BuildStep public void build(Optional appCDsRequested, - JarBuildItem jarResult, PackageConfig packageConfig, + JarBuildItem jarResult, OutputTargetBuildItem outputTarget, PackageConfig packageConfig, + Optional appCDSContainerImage, BuildProducer appCDS, BuildProducer artifactResult) throws 
Exception { if (!appCDsRequested.isPresent()) { return; } - Path appCDSDir = appCDsRequested.get().getAppCDSDir(); - String javaHomeStr = System.getProperty("java.home"); - Path javaHomeDir = Paths.get(javaHomeStr); - Path jdkClassList = javaHomeDir.resolve("lib").resolve(JDK_CLASSLIST_FILE); - if (!jdkClassList.toFile().exists()) { - log.warnf( - "In order to create AppCDS the JDK used to build the Quarkus application must contain a file named '%s' in the its 'lib' directory.", - JDK_CLASSLIST_FILE); - return; - } - String javaExecutableStr = JavaBinFinder.simpleBinaryName(); - String javaBinStr = javaHomeStr + File.separator + "bin" + File.separator + javaExecutableStr; - if (!new File(javaBinStr).canExecute()) { - log.warnf( - "In order to create AppCDS the JDK used to build the Quarkus application must contain an executable named '%s' in its 'bin' directory.", - javaBinStr); - return; + // to actually execute the commands needed to generate the AppCDS file, either the JVM in the container image will be used + // (if specified), or the JVM running the build + String containerImage = determineContainerImage(packageConfig, appCDSContainerImage); + String javaBinPath = null; + if (containerImage == null) { + javaBinPath = System.getProperty("java.home") + File.separator + "bin" + File.separator + + JavaBinFinder.simpleBinaryName(); + if (!new File(javaBinPath).canExecute()) { + log.warnf( + "In order to create AppCDS the JDK used to build the Quarkus application must contain an executable named '%s' in its 'bin' directory.", + javaBinPath); + return; + } } - Path classesLstPath = createClassesLst(packageConfig, jarResult, javaBinStr, appCDSDir); + Path classesLstPath = createClassesLst(jarResult, outputTarget, javaBinPath, containerImage, + appCDsRequested.get().getAppCDSDir(), packageConfig.isFastJar()); if (classesLstPath == null) { - log.warnf("Unable to create AppCDS because '%s' was not created.", CLASSES_LIST_FILE_NAME); return; } log.debugf("'%s' successfully created.", CLASSES_LIST_FILE_NAME); log.info("Launching AppCDS creation process."); - Path appCDSPath = createAppCDS(jarResult, javaBinStr, classesLstPath, packageConfig.isFastJar()); + Path appCDSPath = createAppCDS(jarResult, outputTarget, javaBinPath, containerImage, classesLstPath, + packageConfig.isFastJar()); if (appCDSPath == null) { log.warn("Unable to create AppCDS."); return; } - log.infof( - "AppCDS successfully created at: '%s'.\nTo ensure they are loaded properly, " + - "run the application jar from its directory and also add the '-XX:SharedArchiveFile=app-cds.jsa' " + - "JVM flag.\nMoreover, make sure to use the exact same Java version (%s) to run the application as was used to build it.", - appCDSPath.toAbsolutePath().toString(), System.getProperty("java.version")); + log.infof("AppCDS successfully created at: '%s'.", appCDSPath.toAbsolutePath().toString()); + if (containerImage == null) { + log.infof( + "To ensure they are loaded properly, " + + "run the application jar from its directory and also add the '-XX:SharedArchiveFile=app-cds.jsa' " + + "JVM flag.\nMoreover, make sure to use the exact same Java version (%s) to run the application as was used to build it.", + System.getProperty("java.version")); + } appCDS.produce(new AppCDSResultBuildItem(appCDSPath)); artifactResult.produce(new ArtifactResultBuildItem(appCDSPath, "appCDS", Collections.emptyMap())); } + private String determineContainerImage(PackageConfig packageConfig, + Optional appCDSContainerImage) { + if 
(packageConfig.appcdsBuilderImage.isPresent()) { + return packageConfig.appcdsBuilderImage.get(); + } else if (appCDSContainerImage.isPresent()) { + return appCDSContainerImage.get().getContainerImage(); + } + return null; + } + /** * @return The path of the created classes.lst file or null if the file was not created */ - private Path createClassesLst(PackageConfig packageConfig, JarBuildItem jarResult, - String javaBin, Path appCDSDir) { - - List command = new ArrayList<>(5); - command.add(javaBin); - command.add("-XX:DumpLoadedClassList=" + CLASSES_LIST_FILE_NAME); - command.add(String.format("-D%s=true", MainClassBuildStep.GENERATE_APP_CDS_SYSTEM_PROPERTY)); - command.add("-jar"); - if (packageConfig.isFastJar()) { - command.add(jarResult.getLibraryDir().getParent().resolve(JarResultBuildStep.QUARKUS_RUN_JAR).toAbsolutePath() - .toString()); + private Path createClassesLst(JarBuildItem jarResult, + OutputTargetBuildItem outputTarget, String javaBinPath, String containerImage, Path appCDSDir, boolean isFastJar) { + + List commonJavaArgs = new ArrayList<>(3); + commonJavaArgs.add("-XX:DumpLoadedClassList=" + CLASSES_LIST_FILE_NAME); + commonJavaArgs.add(String.format("-D%s=true", MainClassBuildStep.GENERATE_APP_CDS_SYSTEM_PROPERTY)); + commonJavaArgs.add("-jar"); + + List command; + if (containerImage != null) { + List dockerRunCommand = dockerRunCommands(outputTarget, containerImage, CONTAINER_IMAGE_APPCDS_DIR); + command = new ArrayList<>(dockerRunCommand.size() + 1 + commonJavaArgs.size()); + command.addAll(dockerRunCommand); + command.add("java"); + command.addAll(commonJavaArgs); + if (isFastJar) { + command.add(CONTAINER_IMAGE_BASE_BUILD_DIR + "/" + JarResultBuildStep.DEFAULT_FAST_JAR_DIRECTORY_NAME + "/" + + JarResultBuildStep.QUARKUS_RUN_JAR); + } else { + command.add(CONTAINER_IMAGE_BASE_BUILD_DIR + "/" + jarResult.getPath().getFileName().toString()); + } } else { - command.add(jarResult.getPath().toAbsolutePath().toString()); + command = new ArrayList<>(2 + commonJavaArgs.size()); + command.add(javaBinPath); + command.addAll(commonJavaArgs); + if (isFastJar) { + command + .add(jarResult.getLibraryDir().getParent().resolve(JarResultBuildStep.QUARKUS_RUN_JAR).toAbsolutePath() + .toString()); + } else { + command.add(jarResult.getPath().toAbsolutePath().toString()); + } } if (log.isDebugEnabled()) { - log.debugf("Launching command: '%s' to create '" + CLASSES_LIST_FILE_NAME + "' AppCDS.", String.join(" ", command)); + log.debugf("Launching command: '%s' to create '" + CLASSES_LIST_FILE_NAME + "' file.", String.join(" ", command)); } int exitCode; @@ -140,16 +173,47 @@ private Path createClassesLst(PackageConfig packageConfig, JarBuildItem jarResul return null; } - return appCDSDir.resolve(CLASSES_LIST_FILE_NAME); + Path result = appCDSDir.resolve(CLASSES_LIST_FILE_NAME); + if (!Files.exists(result)) { + log.warnf("Unable to create AppCDS because '%s' was not created. Check the logs for details", + CLASSES_LIST_FILE_NAME); + return null; + } + return result; + } + + // the idea here is to use 'docker run -v ... java ...' 
in order to utilize the JVM of the builder image to + // generate the classes file on the host + private List dockerRunCommands(OutputTargetBuildItem outputTarget, String containerImage, + String containerWorkingDir) { + List command = new ArrayList<>(10); + command.add("docker"); + command.add("run"); + command.add("-v"); + command.add(outputTarget.getOutputDirectory().toAbsolutePath().toString() + ":" + CONTAINER_IMAGE_BASE_BUILD_DIR + + ":z"); + if (SystemUtils.IS_OS_LINUX) { + String uid = getLinuxID("-ur"); + String gid = getLinuxID("-gr"); + if (uid != null && gid != null && !uid.isEmpty() && !gid.isEmpty()) { + command.add("--user"); + command.add(uid + ":" + gid); + } + } + command.add("-w"); + command.add(containerWorkingDir); + command.add("--rm"); + command.add(containerImage); + return command; } /** * @return The path of the created app-cds.jsa file or null if the file was not created */ - private Path createAppCDS(JarBuildItem jarResult, String javaBin, - Path classesLstPath, boolean isFastFar) { + private Path createAppCDS(JarBuildItem jarResult, OutputTargetBuildItem outputTarget, String javaBinPath, + String containerImage, Path classesLstPath, boolean isFastFar) { - Path workingDirectory = isFastFar ? jarResult.getPath().getParent().getParent() : jarResult.getPath().getParent(); + Path workingDirectory = jarResult.getPath().getParent(); Path appCDSPath = workingDirectory.resolve("app-cds.jsa"); if (appCDSPath.toFile().exists()) { try { @@ -159,20 +223,35 @@ private Path createAppCDS(JarBuildItem jarResult, String javaBin, } } - List command = new ArrayList<>(6); - command.add(javaBin); - command.add("-Xshare:dump"); - command.add("-XX:SharedClassListFile=" + classesLstPath.toAbsolutePath().toString()); + List javaArgs = new ArrayList<>(5); + javaArgs.add("-Xshare:dump"); + javaArgs.add("-XX:SharedClassListFile=" + + ((containerImage != null) ? CONTAINER_IMAGE_APPCDS_DIR + "/" + classesLstPath.getFileName().toString() + : classesLstPath.toAbsolutePath().toString())); // We use the relative paths because at runtime 'java -XX:SharedArchiveFile=... -jar ...' expects the AppCDS and jar files // to match exactly what was used at build time. 
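Concretely, when a builder image is configured, the classes.lst step above ends up launching a command of roughly the following shape (an illustrative sketch: the image name, mounted path and uid:gid are placeholders, and the `--user` flag is only added on Linux):

```java
import java.util.Arrays;
import java.util.List;

import io.quarkus.deployment.steps.MainClassBuildStep;

class AppCdsCommandSketch {
    // Illustrative only: approximate command assembled by dockerRunCommands() + createClassesLst().
    static final List<String> COMMAND = Arrays.asList(
            "docker", "run",
            "-v", "/path/to/project/target:/tmp/quarkus:z", // output dir mounted at CONTAINER_IMAGE_BASE_BUILD_DIR
            "--user", "1000:1000",                          // host uid:gid, added on Linux only
            "-w", "/tmp/quarkus/appcds",                    // CONTAINER_IMAGE_APPCDS_DIR
            "--rm", "registry.example.com/openjdk-11-builder",
            "java",
            "-XX:DumpLoadedClassList=classes.lst",
            "-D" + MainClassBuildStep.GENERATE_APP_CDS_SYSTEM_PROPERTY + "=true",
            "-jar", "/tmp/quarkus/quarkus-app/quarkus-run.jar");
}
```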
// For that reason we also run the creation process from inside the output directory, // The end result is that users can simply use 'java -XX:SharedArchiveFile=app-cds.jsa -jar app.jar' - command.add("-XX:SharedArchiveFile=" + appCDSPath.getFileName().toString()); - command.add("--class-path"); + javaArgs.add("-XX:SharedArchiveFile=" + appCDSPath.getFileName().toString()); + javaArgs.add("--class-path"); if (isFastFar) { - command.add(JarResultBuildStep.QUARKUS_RUN_JAR); + javaArgs.add(JarResultBuildStep.QUARKUS_RUN_JAR); + } else { + javaArgs.add(jarResult.getPath().getFileName().toString()); + } + + List command; + if (containerImage != null) { + List dockerRunCommand = dockerRunCommands(outputTarget, containerImage, + CONTAINER_IMAGE_BASE_BUILD_DIR + "/" + JarResultBuildStep.DEFAULT_FAST_JAR_DIRECTORY_NAME); + command = new ArrayList<>(dockerRunCommand.size() + 1 + javaArgs.size()); + command.addAll(dockerRunCommand); + command.add("java"); + command.addAll(javaArgs); } else { - command.add(jarResult.getPath().getFileName().toString()); + command = new ArrayList<>(1 + javaArgs.size()); + command.add(javaBinPath); + command.addAll(javaArgs); } if (log.isDebugEnabled()) { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java index f1dfdc84ce467..f9475b4f36def 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java @@ -5,19 +5,23 @@ import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; import static java.nio.file.StandardOpenOption.WRITE; +import java.io.BufferedInputStream; import java.io.BufferedWriter; +import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.UncheckedIOException; +import java.net.URL; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystem; import java.nio.file.FileVisitOption; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; @@ -26,6 +30,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; import java.util.EnumSet; import java.util.Enumeration; import java.util.HashMap; @@ -74,9 +79,11 @@ import io.quarkus.deployment.pkg.builditem.BuildSystemTargetBuildItem; import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; import io.quarkus.deployment.pkg.builditem.JarBuildItem; +import io.quarkus.deployment.pkg.builditem.LegacyJarRequiredBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageSourceJarBuildItem; import io.quarkus.deployment.pkg.builditem.OutputTargetBuildItem; import io.quarkus.deployment.pkg.builditem.UberJarRequiredBuildItem; +import io.quarkus.deployment.util.FileUtil; /** * This build step builds both the thin jars and uber jars. 
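For context, the fast-jar output this build step assembles is laid out roughly as follows (directory and file names taken from the constants referenced above; a sketch, not an exhaustive listing):

```java
// Sketch of the fast-jar output layout produced by JarResultBuildStep:
//
//   quarkus-app/
//   |-- quarkus-run.jar              <- QUARKUS_RUN_JAR, the launcher jar
//   |-- quarkus-app-dependencies.txt <- QUARKUS_APP_DEPS, when include-dependency-list is enabled
//   |-- app/                         <- the application's own classes and resources
//   |-- lib/
//   |   |-- boot/                    <- runner parent-first artifacts
//   |   `-- main/                    <- remaining runtime dependencies
//   `-- quarkus/                     <- generated bytecode and other build-time metadata
```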
@@ -119,6 +126,8 @@ public class JarResultBuildStep { "META-INF/quarkus-extension.yaml", "META-INF/quarkus-deployment-dependency.graph", "META-INF/jandex.idx", + "META-INF/panache-archive.marker", + "META-INF/build.metadata", // present in the Red Hat Build of Quarkus "LICENSE"); private static final Logger log = Logger.getLogger(JarResultBuildStep.class); @@ -131,6 +140,7 @@ public class JarResultBuildStep { public static final String DEPLOYMENT_LIB = "deployment"; public static final String APPMODEL_DAT = "appmodel.dat"; public static final String QUARKUS_RUN_JAR = "quarkus-run.jar"; + public static final String QUARKUS_APP_DEPS = "quarkus-app-dependencies.txt"; public static final String BOOT_LIB = "boot"; public static final String LIB = "lib"; public static final String MAIN = "main"; @@ -139,7 +149,7 @@ public class JarResultBuildStep { public static final String APP = "app"; public static final String QUARKUS = "quarkus"; public static final String DEFAULT_FAST_JAR_DIRECTORY_NAME = "quarkus-app"; - public static final String RENAMED_JAR_EXTENSION = ".jar.original"; + public static final String MP_CONFIG_FILE = "META-INF/microprofile-config.properties"; @BuildStep OutputTargetBuildItem outputTarget(BuildSystemTargetBuildItem bst, PackageConfig packageConfig) { @@ -169,6 +179,7 @@ public JarBuildItem buildRunnerJar(CurateOutcomeBuildItem curateOutcomeBuildItem List generatedClasses, List generatedResources, List uberJarRequired, + List legacyJarRequired, QuarkusBuildCloseablesBuildItem closeablesBuildItem, List additionalApplicationArchiveBuildItems, MainClassBuildItem mainClassBuildItem, Optional appCDS) throws Exception { @@ -176,19 +187,19 @@ public JarBuildItem buildRunnerJar(CurateOutcomeBuildItem curateOutcomeBuildItem if (appCDS.isPresent()) { handleAppCDSSupportFileGeneration(transformedClasses, generatedClasses, appCDS.get()); } - if (!(packageConfig.type.equalsIgnoreCase(PackageConfig.JAR) || - packageConfig.type.equalsIgnoreCase(PackageConfig.UBER_JAR)) - && packageConfig.uberJar) { + + if (!uberJarRequired.isEmpty() && !legacyJarRequired.isEmpty()) { throw new RuntimeException( - "Cannot set quarkus.package.uber-jar=true and quarkus.package.type, if you want an uber-jar set quarkus.package.type=uber-jar."); + "Extensions with conflicting package types. 
One extension requires uber-jar another requires legacy format"); } - if (!uberJarRequired.isEmpty() || packageConfig.uberJar - || packageConfig.type.equalsIgnoreCase(PackageConfig.UBER_JAR)) { + if (legacyJarRequired.isEmpty() && (!uberJarRequired.isEmpty() + || packageConfig.type.equalsIgnoreCase(PackageConfig.UBER_JAR))) { return buildUberJar(curateOutcomeBuildItem, outputTargetBuildItem, transformedClasses, applicationArchivesBuildItem, packageConfig, applicationInfo, generatedClasses, generatedResources, closeablesBuildItem, mainClassBuildItem); - } else if (packageConfig.isLegacyJar()) { + } else if (!legacyJarRequired.isEmpty() || packageConfig.isLegacyJar() + || packageConfig.type.equalsIgnoreCase(PackageConfig.LEGACY)) { return buildLegacyThinJar(curateOutcomeBuildItem, outputTargetBuildItem, transformedClasses, applicationArchivesBuildItem, packageConfig, applicationInfo, generatedClasses, generatedResources, mainClassBuildItem); @@ -255,15 +266,14 @@ private JarBuildItem buildUberJar(CurateOutcomeBuildItem curateOutcomeBuildItem, final Path standardJar = outputTargetBuildItem.getOutputDirectory() .resolve(outputTargetBuildItem.getBaseName() + ".jar"); - final Path originalJar; - if (Files.exists(standardJar)) { - originalJar = outputTargetBuildItem.getOutputDirectory() - .resolve(outputTargetBuildItem.getBaseName() + RENAMED_JAR_EXTENSION); - } else { - originalJar = null; - } + final Path originalJar = Files.exists(standardJar) ? standardJar : null; + + return new JarBuildItem(runnerJar, originalJar, null, PackageConfig.UBER_JAR, + suffixToClassifier(packageConfig.runnerSuffix)); + } - return new JarBuildItem(runnerJar, originalJar, null, PackageConfig.UBER_JAR); + private String suffixToClassifier(String suffix) { + return suffix.startsWith("-") ? 
suffix.substring(1) : suffix; } private void buildUberJar0(CurateOutcomeBuildItem curateOutcomeBuildItem, @@ -409,8 +419,7 @@ private JarBuildItem buildLegacyThinJar(CurateOutcomeBuildItem curateOutcomeBuil .resolve(outputTargetBuildItem.getBaseName() + packageConfig.runnerSuffix + ".jar"); Path libDir = outputTargetBuildItem.getOutputDirectory().resolve("lib"); Files.deleteIfExists(runnerJar); - IoUtils.recursiveDelete(libDir); - Files.createDirectories(libDir); + IoUtils.createOrEmptyDir(libDir); try (FileSystem runnerZipFs = ZipUtils.newZip(runnerJar)) { @@ -421,7 +430,8 @@ private JarBuildItem buildLegacyThinJar(CurateOutcomeBuildItem curateOutcomeBuil } runnerJar.toFile().setReadable(true, false); - return new JarBuildItem(runnerJar, null, libDir, PackageConfig.LEGACY); + return new JarBuildItem(runnerJar, null, libDir, PackageConfig.LEGACY_JAR, + suffixToClassifier(packageConfig.runnerSuffix)); } private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, @@ -459,8 +469,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, userProviders = buildDir.resolve(packageConfig.userProvidersDirectory.get()); } if (!rebuild) { - IoUtils.recursiveDelete(buildDir); - Files.createDirectories(buildDir); + IoUtils.createOrEmptyDir(buildDir); Files.createDirectories(mainLib); Files.createDirectories(baseLib); Files.createDirectories(appDir); @@ -472,11 +481,30 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, Files.createFile(userProviders.resolve(".keep")); } } else { - IoUtils.recursiveDelete(quarkus); - Files.createDirectories(quarkus); + IoUtils.createOrEmptyDir(quarkus); } Map> copiedArtifacts = new HashMap<>(); + Path fernflowerJar = null; + Path decompiledOutputDir = null; + boolean wasDecompiledSuccessfully = true; + if (packageConfig.fernflower.enabled) { + Path jarDirectory = Paths.get(packageConfig.fernflower.jarDirectory); + if (!Files.exists(jarDirectory)) { + Files.createDirectory(jarDirectory); + } + fernflowerJar = jarDirectory.resolve(String.format("fernflower-%s.jar", packageConfig.fernflower.hash)); + if (!Files.exists(fernflowerJar)) { + boolean downloadComplete = downloadFernflowerJar(packageConfig, fernflowerJar); + if (!downloadComplete) { + fernflowerJar = null; // will ensure that no decompilation takes place + } + } + decompiledOutputDir = buildDir.getParent().resolve("decompiled"); + FileUtil.deleteDirectory(decompiledOutputDir); + Files.createDirectory(decompiledOutputDir); + } + List jars = new ArrayList<>(); List bootJars = new ArrayList<>(); //we process in order of priority @@ -496,6 +524,9 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, } } } + if (fernflowerJar != null) { + wasDecompiledSuccessfully &= decompile(fernflowerJar, decompiledOutputDir, transformedZip); + } } //now generated classes and resources Path generatedZip = quarkus.resolve(GENERATED_BYTECODE_JAR); @@ -518,6 +549,14 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, Files.write(target, i.getClassData()); } } + if (fernflowerJar != null) { + wasDecompiledSuccessfully &= decompile(fernflowerJar, decompiledOutputDir, generatedZip); + } + + if (wasDecompiledSuccessfully && (decompiledOutputDir != null)) { + log.info("The decompiled output can be found at: " + decompiledOutputDir.toAbsolutePath().toString()); + } + //now the application classes Path runnerJar = appDir .resolve(outputTargetBuildItem.getBaseName() + ".jar"); @@ -540,7 +579,7 @@ 
private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, } else { copyDependency(curateOutcomeBuildItem, copiedArtifacts, mainLib, baseLib, jars, true, classPath, appDep); } - if (curateOutcomeBuildItem.getEffectiveModel().getParentFirstArtifacts() + if (curateOutcomeBuildItem.getEffectiveModel().getRunnerParentFirstArtifacts() .contains(appDep.getArtifact().getKey())) { bootJars.addAll(appDep.getArtifact().getPaths().toList()); } @@ -555,9 +594,26 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, jars.add(path); } } + + /* + * There are some files like META-INF/microprofile-config.properties that usually don't exist in application + * and yet are always looked up (spec compliance...) and due to the location in the jar, + * the RunnerClassLoader needs to look into every jar to determine whether they exist or not. + * In keeping true to the original design of the RunnerClassLoader which indexes the directory structure, + * we just add a fail-fast path for files we know don't exist. + * + * TODO: if this gets more complex, we'll probably want a build item to carry this information instead of hard + * coding it here + */ + List nonExistentResources = new ArrayList<>(1); + Enumeration mpConfigURLs = Thread.currentThread().getContextClassLoader().getResources(MP_CONFIG_FILE); + if (!mpConfigURLs.hasMoreElements()) { + nonExistentResources.add(MP_CONFIG_FILE); + } + Path appInfo = buildDir.resolve(QuarkusEntryPoint.QUARKUS_APPLICATION_DAT); try (OutputStream out = Files.newOutputStream(appInfo)) { - SerializedApplication.write(out, mainClassBuildItem.getClassName(), buildDir, jars, bootJars); + SerializedApplication.write(out, mainClassBuildItem.getClassName(), buildDir, jars, bootJars, nonExistentResources); } runnerJar.toFile().setReadable(true, false); @@ -583,7 +639,8 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, Map> relativePaths = new HashMap<>(); for (Map.Entry> e : copiedArtifacts.entrySet()) { relativePaths.put(e.getKey(), - e.getValue().stream().map(s -> buildDir.relativize(s).toString()).collect(Collectors.toList())); + e.getValue().stream().map(s -> buildDir.relativize(s).toString().replace("\\", "/")) + .collect(Collectors.toList())); } //now we serialize the data needed to build up the reaugmentation class path @@ -606,7 +663,11 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, ObjectOutputStream obj = new ObjectOutputStream(out); List paths = new ArrayList<>(); for (AppDependency i : curateOutcomeBuildItem.getEffectiveModel().getFullDeploymentDeps()) { - paths.addAll(relativePaths.get(i.getArtifact().getKey())); + final List list = relativePaths.get(i.getArtifact().getKey()); + // some of the dependencies may have been filtered out + if (list != null) { + paths.addAll(list); + } } obj.writeObject(paths); obj.close(); @@ -616,6 +677,16 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, outputTargetBuildItem.getBuildSystemProperties().store(out, "The original build properties"); } } + + if (packageConfig.includeDependencyList) { + Path deplist = buildDir.resolve(QUARKUS_APP_DEPS); + List lines = new ArrayList<>(); + for (AppDependency i : curateOutcomeBuildItem.getEffectiveModel().getUserDependencies()) { + lines.add(i.getArtifact().toString()); + } + lines.sort(Comparator.naturalOrder()); + Files.write(deplist, lines); + } } else { //if it is a rebuild we might have classes @@ -628,7 +699,59 @@ public void accept(Path 
path) { } }); } - return new JarBuildItem(initJar, null, libDir, packageConfig.type); + return new JarBuildItem(initJar, null, libDir, packageConfig.type, null); + } + + private boolean downloadFernflowerJar(PackageConfig packageConfig, Path fernflowerJar) { + String downloadURL = String.format("https://jitpack.io/com/github/fesh0r/fernflower/%s/fernflower-%s.jar", + packageConfig.fernflower.hash, packageConfig.fernflower.hash); + try (BufferedInputStream in = new BufferedInputStream(new URL(downloadURL).openStream()); + FileOutputStream fileOutputStream = new FileOutputStream(fernflowerJar.toFile())) { + byte[] dataBuffer = new byte[1024]; + int bytesRead; + while ((bytesRead = in.read(dataBuffer, 0, 1024)) != -1) { + fileOutputStream.write(dataBuffer, 0, bytesRead); + } + return true; + } catch (IOException e) { + log.error("Unable to download Fernflower from " + downloadURL, e); + return false; + } + } + + private boolean decompile(Path fernflowerJar, Path decompiledOutputDir, Path jarToDecompile) { + int exitCode; + try { + ProcessBuilder processBuilder = new ProcessBuilder( + Arrays.asList("java", "-jar", fernflowerJar.toAbsolutePath().toString(), + jarToDecompile.toAbsolutePath().toString(), decompiledOutputDir.toAbsolutePath().toString())); + if (log.isDebugEnabled()) { + processBuilder.inheritIO(); + } else { + processBuilder.redirectError(NULL_FILE); + processBuilder.redirectOutput(NULL_FILE); + } + exitCode = processBuilder.start().waitFor(); + } catch (Exception e) { + log.error("Failed to launch Fernflower decompiler.", e); + return false; + } + + if (exitCode != 0) { + log.errorf("Fernflower decompiler exited with error code: %d.", exitCode); + return false; + } + + String jarFileName = jarToDecompile.getFileName().toString(); + Path decompiledJar = decompiledOutputDir.resolve(jarFileName); + try { + ZipUtils.unzip(decompiledJar, decompiledOutputDir.resolve(jarFileName.replace(".jar", ""))); + Files.deleteIfExists(decompiledJar); + } catch (IOException ignored) { + // it doesn't really matter if we can't unzip the jar as we do it merely for user convenience + } + + return true; } private void copyDependency(CurateOutcomeBuildItem curateOutcomeBuildItem, Map> runtimeArtifacts, @@ -644,51 +767,49 @@ private void copyDependency(CurateOutcomeBuildItem curateOutcomeBuildItem, Map new ArrayList<>()).add(targetPath); - } else { - final String fileName = depArtifact.getGroupId() + "." + resolvedDep.getFileName(); - final Path targetPath = libDir.resolve(fileName); - Files.copy(resolvedDep, targetPath, StandardCopyOption.REPLACE_EXISTING); - jars.add(targetPath); - runtimeArtifacts.computeIfAbsent(depArtifact.getKey(), (s) -> new ArrayList<>()).add(targetPath); - } + final String fileName = depArtifact.getGroupId() + "." + resolvedDep.getFileName(); + final Path targetPath; + + if (allowParentFirst && curateOutcomeBuildItem.getEffectiveModel().getRunnerParentFirstArtifacts() + .contains(depArtifact.getKey())) { + targetPath = baseLib.resolve(fileName); + classPath.append(" ").append(LIB).append("/").append(BOOT_LIB).append("/").append(fileName); } else { + targetPath = libDir.resolve(fileName); + jars.add(targetPath); + } + runtimeArtifacts.computeIfAbsent(depArtifact.getKey(), (s) -> new ArrayList<>(1)).add(targetPath); + + if (Files.isDirectory(resolvedDep)) { // This case can happen when we are building a jar from inside the Quarkus repository // and Quarkus Bootstrap's localProjectDiscovery has been set to true. 
In such a case // the non-jar dependencies are the Quarkus dependencies picked up on the file system - // these should never be parent first - - final String fileName = depArtifact.getGroupId() + "." + resolvedDep.getFileName(); - final Path targetPath = libDir.resolve(fileName); - runtimeArtifacts.computeIfAbsent(depArtifact.getKey(), (s) -> new ArrayList<>()).add(targetPath); - jars.add(targetPath); - try (FileSystem runnerZipFs = ZipUtils.newZip(targetPath)) { - Files.walkFileTree(resolvedDep, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, - new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) - throws IOException { - final Path relativePath = resolvedDep.relativize(file); - final Path targetPath = runnerZipFs.getPath(relativePath.toString()); - if (targetPath.getParent() != null) { - Files.createDirectories(targetPath.getParent()); - } - Files.copy(file, targetPath, StandardCopyOption.REPLACE_EXISTING); //replace only needed for testing - return FileVisitResult.CONTINUE; - } - }); - } + packageClasses(resolvedDep, targetPath); + } else { + Files.copy(resolvedDep, targetPath, StandardCopyOption.REPLACE_EXISTING); } } } + private void packageClasses(Path resolvedDep, final Path targetPath) throws IOException { + try (FileSystem runnerZipFs = ZipUtils.newZip(targetPath)) { + Files.walkFileTree(resolvedDep, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, + new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) + throws IOException { + final Path relativePath = resolvedDep.relativize(file); + final Path targetPath = runnerZipFs.getPath(relativePath.toString()); + if (targetPath.getParent() != null) { + Files.createDirectories(targetPath.getParent()); + } + Files.copy(file, targetPath, StandardCopyOption.REPLACE_EXISTING); //replace only needed for testing + return FileVisitResult.CONTINUE; + } + }); + } + } + /** * Native images are built from a specially created jar file. This allows for changes in how the jar file is generated. * @@ -707,8 +828,7 @@ public NativeImageSourceJarBuildItem buildNativeImageJar(CurateOutcomeBuildItem List uberJarRequired) throws Exception { Path targetDirectory = outputTargetBuildItem.getOutputDirectory() .resolve(outputTargetBuildItem.getBaseName() + "-native-image-source-jar"); - IoUtils.recursiveDelete(targetDirectory); - Files.createDirectories(targetDirectory); + IoUtils.createOrEmptyDir(targetDirectory); List allClasses = new ArrayList<>(generatedClasses); allClasses.addAll(nativeImageResources.stream() @@ -721,9 +841,14 @@ public NativeImageSourceJarBuildItem buildNativeImageJar(CurateOutcomeBuildItem "maximum command length (see https://github.com/oracle/graal/issues/2387)."); // Native image source jar generation with the uber jar strategy is provided as a workaround for Windows and // will be removed once https://github.com/oracle/graal/issues/2387 is fixed. 
- return buildNativeImageUberJar(curateOutcomeBuildItem, outputTargetBuildItem, transformedClasses, + final NativeImageSourceJarBuildItem nativeImageSourceJarBuildItem = buildNativeImageUberJar(curateOutcomeBuildItem, + outputTargetBuildItem, transformedClasses, applicationArchivesBuildItem, packageConfig, applicationInfo, allClasses, generatedResources, mainClassBuildItem, targetDirectory); + // additionally copy any json config files to a location accessible by native-image tool during + // native-image generation + copyJsonConfigFiles(applicationArchivesBuildItem, targetDirectory); + return nativeImageSourceJarBuildItem; } else { return buildNativeImageThinJar(curateOutcomeBuildItem, outputTargetBuildItem, transformedClasses, applicationArchivesBuildItem, @@ -1188,4 +1313,9 @@ public boolean test(Path path, BasicFileAttributes basicFileAttributes) { return basicFileAttributes.isRegularFile() && path.toString().endsWith(".json"); } } + + // copied from Java 9 + // TODO remove when we move to Java 11 + + private static final File NULL_FILE = new File(SystemUtils.IS_OS_WINDOWS ? "NUL" : "/dev/null"); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/LinuxIDUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/LinuxIDUtil.java new file mode 100644 index 0000000000000..73130bcda97b8 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/LinuxIDUtil.java @@ -0,0 +1,61 @@ +package io.quarkus.deployment.pkg.steps; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; + +final class LinuxIDUtil { + + private LinuxIDUtil() { + } + + static String getLinuxID(String option) { + Process process; + + try { + StringBuilder responseBuilder = new StringBuilder(); + String line; + + ProcessBuilder idPB = new ProcessBuilder().command("id", option); + idPB.redirectError(new File("/dev/null")); + idPB.redirectInput(new File("/dev/null")); + + process = idPB.start(); + try (InputStream inputStream = process.getInputStream()) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { + while ((line = reader.readLine()) != null) { + responseBuilder.append(line); + } + safeWaitFor(process); + return responseBuilder.toString(); + } + } catch (Throwable t) { + safeWaitFor(process); + throw t; + } + } catch (IOException e) { //from process.start() + //swallow and return null id + return null; + } + } + + private static void safeWaitFor(Process process) { + boolean intr = false; + try { + for (;;) + try { + process.waitFor(); + return; + } catch (InterruptedException ex) { + intr = true; + } + } finally { + if (intr) { + Thread.currentThread().interrupt(); + } + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java new file mode 100644 index 0000000000000..6aa30b51ea184 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java @@ -0,0 +1,137 @@ +package io.quarkus.deployment.pkg.steps; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import 
java.util.function.Function; +import java.util.stream.Stream; + +import org.jboss.logging.Logger; + +import io.quarkus.deployment.pkg.NativeConfig; +import io.quarkus.deployment.util.FileUtil; +import io.quarkus.deployment.util.ProcessUtil; + +public abstract class NativeImageBuildContainerRunner extends NativeImageBuildRunner { + + private static final Logger log = Logger.getLogger(NativeImageBuildContainerRunner.class); + + final NativeConfig nativeConfig; + protected final NativeConfig.ContainerRuntime containerRuntime; + private final String[] baseContainerRuntimeArgs; + protected final String outputPath; + + public NativeImageBuildContainerRunner(NativeConfig nativeConfig, Path outputDir) { + this.nativeConfig = nativeConfig; + containerRuntime = nativeConfig.containerRuntime.orElseGet(NativeImageBuildContainerRunner::detectContainerRuntime); + log.infof("Using %s to run the native image builder", containerRuntime.getExecutableName()); + + this.baseContainerRuntimeArgs = new String[] { "--env", "LANG=C" }; + + outputPath = outputDir == null ? null : outputDir.toAbsolutePath().toString(); + + } + + @Override + public void setup(boolean processInheritIODisabled) { + if (containerRuntime == NativeConfig.ContainerRuntime.DOCKER + || containerRuntime == NativeConfig.ContainerRuntime.PODMAN) { + // we pull the docker image in order to give users an indication of which step the process is at + // it's not strictly necessary we do this, however if we don't the subsequent version command + // will appear to block and no output will be shown + log.info("Checking image status " + nativeConfig.builderImage); + Process pullProcess = null; + try { + final ProcessBuilder pb = new ProcessBuilder( + Arrays.asList(containerRuntime.getExecutableName(), "pull", nativeConfig.builderImage)); + pullProcess = ProcessUtil.launchProcess(pb, processInheritIODisabled); + pullProcess.waitFor(); + } catch (IOException | InterruptedException e) { + throw new RuntimeException("Failed to pull builder image " + nativeConfig.builderImage, e); + } finally { + if (pullProcess != null) { + pullProcess.destroy(); + } + } + } + } + + @Override + protected String[] getGraalVMVersionCommand(List args) { + return buildCommand("run", Collections.singletonList("--rm"), args); + } + + @Override + protected String[] getBuildCommand(List args) { + return buildCommand("run", getContainerRuntimeBuildArgs(), args); + } + + protected List getContainerRuntimeBuildArgs() { + List containerRuntimeArgs = new ArrayList<>(); + nativeConfig.containerRuntimeOptions.ifPresent(containerRuntimeArgs::addAll); + if (nativeConfig.debugBuildProcess && nativeConfig.publishDebugBuildProcessPort) { + // publish the debug port onto the host if asked for + containerRuntimeArgs.add("--publish=" + NativeImageBuildStep.DEBUG_BUILD_PROCESS_PORT + ":" + + NativeImageBuildStep.DEBUG_BUILD_PROCESS_PORT); + } + return containerRuntimeArgs; + } + + protected String[] buildCommand(String dockerCmd, List containerRuntimeArgs, List command) { + return Stream + .of(Stream.of(containerRuntime.getExecutableName()), Stream.of(dockerCmd), Stream.of(baseContainerRuntimeArgs), + containerRuntimeArgs.stream(), Stream.of(nativeConfig.builderImage), command.stream()) + .flatMap(Function.identity()).toArray(String[]::new); + } + + /** + * @return {@link NativeConfig.ContainerRuntime#DOCKER} if it's available, or {@link NativeConfig.ContainerRuntime#PODMAN} + * if the podman + * executable exists in the environment or if the docker executable is an alias to podman + * @throws 
IllegalStateException if no container runtime was found to build the image + */ + private static NativeConfig.ContainerRuntime detectContainerRuntime() { + // Docker version 19.03.14, build 5eb3275d40 + String dockerVersionOutput = getVersionOutputFor(NativeConfig.ContainerRuntime.DOCKER); + boolean dockerAvailable = dockerVersionOutput.contains("Docker version"); + // Check if Podman is installed + // podman version 2.1.1 + String podmanVersionOutput = getVersionOutputFor(NativeConfig.ContainerRuntime.PODMAN); + boolean podmanAvailable = podmanVersionOutput.startsWith("podman version"); + if (dockerAvailable) { + // Check if "docker" is an alias to "podman" + if (dockerVersionOutput.equals(podmanVersionOutput)) { + return NativeConfig.ContainerRuntime.PODMAN; + } + return NativeConfig.ContainerRuntime.DOCKER; + } else if (podmanAvailable) { + return NativeConfig.ContainerRuntime.PODMAN; + } else { + throw new IllegalStateException("No container runtime was found to run the native image builder"); + } + } + + private static String getVersionOutputFor(NativeConfig.ContainerRuntime containerRuntime) { + Process versionProcess = null; + try { + ProcessBuilder pb = new ProcessBuilder(containerRuntime.getExecutableName(), "--version") + .redirectErrorStream(true); + versionProcess = pb.start(); + versionProcess.waitFor(); + return new String(FileUtil.readFileContents(versionProcess.getInputStream()), StandardCharsets.UTF_8); + } catch (IOException | InterruptedException e) { + // If an exception is thrown in the process, just return an empty String + log.debugf(e, "Failure to read version output from %s", containerRuntime.getExecutableName()); + return ""; + } finally { + if (versionProcess != null) { + versionProcess.destroy(); + } + } + } + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java new file mode 100644 index 0000000000000..ec8e6ee6cb4e2 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java @@ -0,0 +1,42 @@ +package io.quarkus.deployment.pkg.steps; + +import static io.quarkus.deployment.pkg.steps.LinuxIDUtil.getLinuxID; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; + +import org.apache.commons.lang3.SystemUtils; + +import io.quarkus.deployment.pkg.NativeConfig; +import io.quarkus.deployment.util.FileUtil; + +public class NativeImageBuildLocalContainerRunner extends NativeImageBuildContainerRunner { + + public NativeImageBuildLocalContainerRunner(NativeConfig nativeConfig, Path outputDir) { + super(nativeConfig, outputDir); + } + + @Override + protected List getContainerRuntimeBuildArgs() { + List containerRuntimeArgs = super.getContainerRuntimeBuildArgs(); + String volumeOutputPath = outputPath; + if (SystemUtils.IS_OS_WINDOWS) { + volumeOutputPath = FileUtil.translateToVolumePath(volumeOutputPath); + } else if (SystemUtils.IS_OS_LINUX) { + String uid = getLinuxID("-ur"); + String gid = getLinuxID("-gr"); + if (uid != null && gid != null && !uid.isEmpty() && !gid.isEmpty()) { + Collections.addAll(containerRuntimeArgs, "--user", uid + ":" + gid); + if (containerRuntime == NativeConfig.ContainerRuntime.PODMAN) { + // Needed to avoid AccessDeniedExceptions + containerRuntimeArgs.add("--userns=keep-id"); + } + } + } + + Collections.addAll(containerRuntimeArgs, "--rm", "-v", + volumeOutputPath + ":" + 
NativeImageBuildStep.CONTAINER_BUILD_VOLUME_PATH + ":z"); + return containerRuntimeArgs; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalRunner.java new file mode 100644 index 0000000000000..bbea0c4099a9e --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalRunner.java @@ -0,0 +1,36 @@ +package io.quarkus.deployment.pkg.steps; + +import java.io.File; +import java.io.IOException; +import java.util.List; +import java.util.stream.Stream; + +public class NativeImageBuildLocalRunner extends NativeImageBuildRunner { + + private final String nativeImageExecutable; + + public NativeImageBuildLocalRunner(String nativeImageExecutable) { + this.nativeImageExecutable = nativeImageExecutable; + } + + @Override + public void cleanupServer(File outputDir) throws InterruptedException, IOException { + final String[] cleanupCommand = { nativeImageExecutable, "--server-shutdown" }; + runCommand(cleanupCommand, null, outputDir); + } + + @Override + protected String[] getGraalVMVersionCommand(List args) { + return buildCommand(args); + } + + @Override + protected String[] getBuildCommand(List args) { + return buildCommand(args); + } + + private String[] buildCommand(List args) { + return Stream.concat(Stream.of(nativeImageExecutable), args.stream()).toArray(String[]::new); + } + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildRemoteContainerRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildRemoteContainerRunner.java new file mode 100644 index 0000000000000..34c20b4b2a31f --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildRemoteContainerRunner.java @@ -0,0 +1,64 @@ +package io.quarkus.deployment.pkg.steps; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.file.Path; +import java.util.List; + +import org.jboss.logging.Logger; + +import io.quarkus.deployment.pkg.NativeConfig; + +public class NativeImageBuildRemoteContainerRunner extends NativeImageBuildContainerRunner { + + private static final Logger log = Logger.getLogger(NativeImageBuildRemoteContainerRunner.class); + + private final String resultingExecutableName; + private String containerId; + + public NativeImageBuildRemoteContainerRunner(NativeConfig nativeConfig, Path outputDir, String resultingExecutableName) { + super(nativeConfig, outputDir); + this.resultingExecutableName = resultingExecutableName; + } + + @Override + protected void preBuild(List buildArgs) throws InterruptedException, IOException { + List containerRuntimeArgs = getContainerRuntimeBuildArgs(); + String[] createContainerCommand = buildCommand("create", containerRuntimeArgs, buildArgs); + log.info(String.join(" ", createContainerCommand).replace("$", "\\$")); + Process createContainerProcess = new ProcessBuilder(createContainerCommand).start(); + if (createContainerProcess.waitFor() != 0) { + throw new RuntimeException("Failed to create builder container."); + } + try (BufferedReader reader = new BufferedReader(new InputStreamReader(createContainerProcess.getInputStream()))) { + containerId = reader.readLine(); + } + String[] copyCommand = new String[] { containerRuntime.getExecutableName(), "cp", outputPath + "/.", + containerId + ":" + NativeImageBuildStep.CONTAINER_BUILD_VOLUME_PATH }; + 
runCommand(copyCommand, "Failed to copy source-jar and libs from host to builder container", null); + super.preBuild(buildArgs); + } + + @Override + protected String[] getBuildCommand(List args) { + return new String[] { containerRuntime.getExecutableName(), "start", "--attach", containerId }; + } + + @Override + protected void postBuild() throws InterruptedException, IOException { + copyFromBuilder(resultingExecutableName, "Failed to copy native executable from container back to the host."); + if (nativeConfig.debug.enabled) { + copyFromBuilder("sources", "Failed to copy sources from container back to the host."); + } + String[] removeCommand = new String[] { containerRuntime.getExecutableName(), "container", "rm", "--volumes", + containerId }; + runCommand(removeCommand, "Failed to remove container: " + containerId, null); + } + + private void copyFromBuilder(String path, String errorMsg) throws IOException, InterruptedException { + String[] copyCommand = new String[] { containerRuntime.getExecutableName(), "cp", + containerId + ":" + NativeImageBuildStep.CONTAINER_BUILD_VOLUME_PATH + "/" + path, outputPath }; + runCommand(copyCommand, errorMsg, null); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildRunner.java new file mode 100644 index 0000000000000..d3adaf2d0d220 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildRunner.java @@ -0,0 +1,117 @@ +package io.quarkus.deployment.pkg.steps; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import org.jboss.logging.Logger; + +import io.quarkus.deployment.pkg.steps.NativeImageBuildStep.GraalVM; +import io.quarkus.deployment.util.ProcessUtil; + +public abstract class NativeImageBuildRunner { + + private static final Logger log = Logger.getLogger(NativeImageBuildRunner.class); + + public GraalVM.Version getGraalVMVersion() { + final GraalVM.Version graalVMVersion; + try { + String[] versionCommand = getGraalVMVersionCommand(Collections.singletonList("--version")); + log.debugf(String.join(" ", versionCommand).replace("$", "\\$")); + Process versionProcess = new ProcessBuilder(versionCommand) + .redirectErrorStream(true) + .start(); + versionProcess.waitFor(); + try (BufferedReader reader = new BufferedReader( + new InputStreamReader(versionProcess.getInputStream(), StandardCharsets.UTF_8))) { + graalVMVersion = GraalVM.Version.of(reader.lines()); + } + } catch (Exception e) { + throw new RuntimeException("Failed to get GraalVM version", e); + } + return graalVMVersion; + } + + public void setup(boolean processInheritIODisabled) { + } + + public void cleanupServer(File outputDir) throws InterruptedException, IOException { + } + + public int build(List args, Path outputDir, boolean processInheritIODisabled) + throws InterruptedException, IOException { + preBuild(args); + try { + CountDownLatch errorReportLatch = new CountDownLatch(1); + final String[] buildCommand = getBuildCommand(args); + final ProcessBuilder processBuilder = new ProcessBuilder(buildCommand) + .directory(outputDir.toFile()); + log.info(String.join(" ", 
buildCommand).replace("$", "\\$")); + final Process process = ProcessUtil.launchProcessStreamStdOut(processBuilder, processInheritIODisabled); + ExecutorService executor = Executors.newSingleThreadExecutor(); + executor.submit(new ErrorReplacingProcessReader(process.getErrorStream(), outputDir.resolve("reports").toFile(), + errorReportLatch)); + executor.shutdown(); + errorReportLatch.await(); + return process.waitFor(); + } finally { + postBuild(); + } + } + + protected abstract String[] getGraalVMVersionCommand(List args); + + protected abstract String[] getBuildCommand(List args); + + protected void preBuild(List buildArgs) throws IOException, InterruptedException { + } + + protected void postBuild() throws InterruptedException, IOException { + } + + /** + * Run {@code command} in {@code workingDirectory} and log error if {@code errorMsg} is not null. + * + * @param command The command to run + * @param errorMsg The error message to be printed in case of failure. + * If {@code null} the failure is ignored, but logged. + * @param workingDirectory The directory in which to run the command + */ + void runCommand(String[] command, String errorMsg, File workingDirectory) { + log.info(String.join(" ", command).replace("$", "\\$")); + Process process = null; + try { + final ProcessBuilder processBuilder = new ProcessBuilder(command); + if (workingDirectory != null) { + processBuilder.directory(workingDirectory); + } + process = processBuilder.start(); + final int exitCode = process.waitFor(); + if (exitCode != 0) { + if (errorMsg != null) { + log.error(errorMsg); + } else { + log.debugf("Command: " + String.join(" ", command) + " failed with exit code " + exitCode); + } + } + } catch (IOException | InterruptedException e) { + if (errorMsg != null) { + log.error(errorMsg); + } else { + log.debugf(e, "Command: " + String.join(" ", command) + " failed."); + } + } finally { + if (process != null) { + process.destroy(); + } + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStep.java index aaf9e5d89bdc9..b8f59c561217b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStep.java @@ -1,48 +1,47 @@ package io.quarkus.deployment.pkg.steps; -import java.io.BufferedReader; import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; +import java.util.Iterator; import java.util.List; -import java.util.Map; +import java.util.Objects; import java.util.Optional; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Stream; import org.apache.commons.lang3.SystemUtils; import org.jboss.logging.Logger; +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppDependency; import io.quarkus.bootstrap.util.IoUtils; import 
io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.nativeimage.NativeImageSystemPropertyBuildItem; import io.quarkus.deployment.pkg.NativeConfig; import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; +import io.quarkus.deployment.pkg.builditem.BuildSystemTargetBuildItem; +import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageSourceJarBuildItem; import io.quarkus.deployment.pkg.builditem.OutputTargetBuildItem; import io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled; -import io.quarkus.deployment.util.FileUtil; -import io.quarkus.deployment.util.GlobUtil; -import io.quarkus.deployment.util.ProcessUtil; public class NativeImageBuildStep { private static final Logger log = Logger.getLogger(NativeImageBuildStep.class); - private static final String DEBUG_BUILD_PROCESS_PORT = "5005"; - private static final String GRAALVM_HOME = "GRAALVM_HOME"; + public static final String DEBUG_BUILD_PROCESS_PORT = "5005"; /** * Name of the system property to retrieve JAVA_HOME @@ -61,351 +60,281 @@ public class NativeImageBuildStep { private static final int OOM_ERROR_VALUE = 137; private static final String QUARKUS_XMX_PROPERTY = "quarkus.native.native-image-xmx"; - private static final String CONTAINER_BUILD_VOLUME_PATH = "/project"; + public static final String CONTAINER_BUILD_VOLUME_PATH = "/project"; private static final String TRUST_STORE_SYSTEM_PROPERTY_MARKER = "-Djavax.net.ssl.trustStore="; private static final String MOVED_TRUST_STORE_NAME = "trustStore"; + public static final String APP_SOURCES = "app-sources"; @BuildStep(onlyIf = NativeBuild.class) ArtifactResultBuildItem result(NativeImageBuildItem image) { return new ArtifactResultBuildItem(image.getPath(), PackageConfig.NATIVE, Collections.emptyMap()); } + @BuildStep(onlyIf = NativeSourcesBuild.class) + ArtifactResultBuildItem nativeSourcesResult(NativeConfig nativeConfig, + BuildSystemTargetBuildItem buildSystemTargetBuildItem, + NativeImageSourceJarBuildItem nativeImageSourceJarBuildItem, + OutputTargetBuildItem outputTargetBuildItem, + PackageConfig packageConfig, + List nativeImageProperties) { + + Path outputDir; + try { + outputDir = buildSystemTargetBuildItem.getOutputDirectory().resolve("native-sources"); + IoUtils.createOrEmptyDir(outputDir); + IoUtils.copy(nativeImageSourceJarBuildItem.getPath().getParent(), outputDir); + } catch (IOException e) { + throw new UncheckedIOException("Unable to create native-sources output directory", e); + } + + Path runnerJar = outputDir.resolve(nativeImageSourceJarBuildItem.getPath().getFileName()); + + String nativeImageName = getNativeImageName(outputTargetBuildItem, packageConfig); + + NativeImageInvokerInfo nativeImageArgs = new NativeImageInvokerInfo.Builder() + .setNativeConfig(nativeConfig) + .setOutputTargetBuildItem(outputTargetBuildItem) + .setNativeImageProperties(nativeImageProperties) + .setOutputDir(outputDir) + .setRunnerJarName(runnerJar.getFileName().toString()) + // the path to native-image is not known now, it is only known at the time the native-sources will be consumed + .setNativeImageName(nativeImageName) + .setContainerBuild(nativeConfig.containerRuntime.isPresent() || nativeConfig.containerBuild) + .build(); + List command = nativeImageArgs.getArgs(); + try (FileOutputStream commandFOS = new 
FileOutputStream(outputDir.resolve("native-image.args").toFile())) { + String commandStr = String.join(" ", command); + commandFOS.write(commandStr.getBytes(StandardCharsets.UTF_8)); + + log.info("The sources for a subsequent native-image run along with the necessary arguments can be found in " + + outputDir); + } catch (Exception e) { + throw new RuntimeException("Failed to build native image sources", e); + } + + // drop the original output to avoid confusion + IoUtils.recursiveDelete(nativeImageSourceJarBuildItem.getPath().getParent()); + + return new ArtifactResultBuildItem(nativeImageSourceJarBuildItem.getPath(), PackageConfig.NATIVE_SOURCES, + Collections.emptyMap()); + } + @BuildStep public NativeImageBuildItem build(NativeConfig nativeConfig, NativeImageSourceJarBuildItem nativeImageSourceJarBuildItem, OutputTargetBuildItem outputTargetBuildItem, PackageConfig packageConfig, + CurateOutcomeBuildItem curateOutcomeBuildItem, List nativeImageProperties, - final Optional processInheritIODisabled) { + Optional processInheritIODisabled) { + if (nativeConfig.debug.enabled) { + copyJarSourcesToLib(outputTargetBuildItem, curateOutcomeBuildItem); + copySourcesToSourceCache(outputTargetBuildItem); + } + Path runnerJar = nativeImageSourceJarBuildItem.getPath(); log.info("Building native image from " + runnerJar); Path outputDir = nativeImageSourceJarBuildItem.getPath().getParent(); final String runnerJarName = runnerJar.getFileName().toString(); - HashMap env = new HashMap<>(System.getenv()); - List nativeImage; - String noPIE = ""; - boolean isContainerBuild = nativeConfig.containerRuntime.isPresent() || nativeConfig.containerBuild; - if (isContainerBuild) { - String containerRuntime = nativeConfig.containerRuntime.orElse("docker"); - // E.g. "/usr/bin/docker run -v {{PROJECT_DIR}}:/project --rm quarkus/graalvm-native-image" - nativeImage = new ArrayList<>(); - - String outputPath = outputDir.toAbsolutePath().toString(); - if (SystemUtils.IS_OS_WINDOWS) { - outputPath = FileUtil.translateToVolumePath(outputPath); - } - Collections.addAll(nativeImage, containerRuntime, "run", "-v", - outputPath + ":" + CONTAINER_BUILD_VOLUME_PATH + ":z", "--env", "LANG=C"); - - if (SystemUtils.IS_OS_LINUX) { - if ("docker".equals(containerRuntime)) { - String uid = getLinuxID("-ur"); - String gid = getLinuxID("-gr"); - if (uid != null && gid != null && !"".equals(uid) && !"".equals(gid)) { - Collections.addAll(nativeImage, "--user", uid + ":" + gid); - } - } else if ("podman".equals(containerRuntime)) { - // Needed to avoid AccessDeniedExceptions - nativeImage.add("--userns=keep-id"); - } - } - nativeConfig.containerRuntimeOptions.ifPresent(nativeImage::addAll); - if (nativeConfig.debugBuildProcess && nativeConfig.publishDebugBuildProcessPort) { - // publish the debug port onto the host if asked for - nativeImage.add("--publish=" + DEBUG_BUILD_PROCESS_PORT + ":" + DEBUG_BUILD_PROCESS_PORT); - } - Collections.addAll(nativeImage, "--rm", nativeConfig.builderImage); - - if ("docker".equals(containerRuntime) || "podman".equals(containerRuntime)) { - // we pull the docker image in order to give users an indication of which step the process is at - // it's not strictly necessary we do this, however if we don't the subsequent version command - // will appear to block and no output will be shown - log.info("Checking image status " + nativeConfig.builderImage); - Process pullProcess = null; - try { - final ProcessBuilder pb = new ProcessBuilder( - Arrays.asList(containerRuntime, "pull", nativeConfig.builderImage)); - 
pullProcess = ProcessUtil.launchProcess(pb, processInheritIODisabled); - pullProcess.waitFor(); - } catch (IOException | InterruptedException e) { - throw new RuntimeException("Failed to pull builder image " + nativeConfig.builderImage, e); - } finally { - if (pullProcess != null) { - pullProcess.destroy(); - } - } - } - - } else { - if (SystemUtils.IS_OS_LINUX) { - noPIE = detectNoPIE(); - } - - Optional graal = nativeConfig.graalvmHome; - File java = nativeConfig.javaHome; - if (graal.isPresent()) { - env.put(GRAALVM_HOME, graal.get()); - } - if (java == null) { - // try system property first - it will be the JAVA_HOME used by the current JVM - String home = System.getProperty(JAVA_HOME_SYS); - if (home == null) { - // No luck, somewhat a odd JVM not enforcing this property - // try with the JAVA_HOME environment variable - home = env.get(JAVA_HOME_ENV); - } - - if (home != null) { - java = new File(home); - } - } - nativeImage = Collections.singletonList(getNativeImageExecutable(graal, java, env).getAbsolutePath()); + boolean isContainerBuild = isContainerBuild(nativeConfig); + if (!isContainerBuild && SystemUtils.IS_OS_LINUX) { + noPIE = detectNoPIE(); } - final Optional graalVMVersion; - - try { - List versionCommand = new ArrayList<>(nativeImage); - versionCommand.add("--version"); + String nativeImageName = getNativeImageName(outputTargetBuildItem, packageConfig); + String resultingExecutableName = getResultingExecutableName(nativeImageName, isContainerBuild); - Process versionProcess = new ProcessBuilder(versionCommand.toArray(new String[0])) - .redirectErrorStream(true) - .start(); - versionProcess.waitFor(); - try (BufferedReader reader = new BufferedReader( - new InputStreamReader(versionProcess.getInputStream(), StandardCharsets.UTF_8))) { - graalVMVersion = reader.lines().filter((l) -> l.startsWith("GraalVM Version")).findFirst(); - } - } catch (Exception e) { - throw new RuntimeException("Failed to get GraalVM version", e); - } + NativeImageBuildRunner buildRunner = getNativeImageBuildRunner(nativeConfig, outputDir, resultingExecutableName); + buildRunner.setup(processInheritIODisabled.isPresent()); + final GraalVM.Version graalVMVersion = buildRunner.getGraalVMVersion(); - if (graalVMVersion.isPresent()) { - checkGraalVMVersion(graalVMVersion.get()); + if (graalVMVersion.isDetected()) { + checkGraalVMVersion(graalVMVersion); } else { log.error("Unable to get GraalVM version from the native-image binary."); } try { - List command = new ArrayList<>(nativeImage); - if (nativeConfig.cleanupServer) { - List cleanup = new ArrayList<>(nativeImage); - cleanup.add("--server-shutdown"); - final ProcessBuilder pb = new ProcessBuilder(cleanup.toArray(new String[0])); - pb.directory(outputDir.toFile()); - final Process process = ProcessUtil.launchProcess(pb, processInheritIODisabled); - process.waitFor(); - } - Boolean enableSslNative = false; - for (NativeImageSystemPropertyBuildItem prop : nativeImageProperties) { - //todo: this should be specific build items - if (prop.getKey().equals("quarkus.ssl.native") && prop.getValue() != null) { - enableSslNative = Boolean.parseBoolean(prop.getValue()); - } else if (prop.getKey().equals("quarkus.jni.enable") && prop.getValue().equals("false")) { - log.warn("Your application is setting the deprecated 'quarkus.jni.enable' configuration key to false." 
- + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" - + " will be removed in a future Quarkus version."); - } else if (prop.getKey().equals("quarkus.native.enable-all-security-services") && prop.getValue() != null) { - nativeConfig.enableAllSecurityServices |= Boolean.parseBoolean(prop.getValue()); - } else if (prop.getKey().equals("quarkus.native.enable-all-charsets") && prop.getValue() != null) { - nativeConfig.addAllCharsets |= Boolean.parseBoolean(prop.getValue()); - } else if (prop.getKey().equals("quarkus.native.enable-all-timezones") && prop.getValue() != null) { - nativeConfig.includeAllTimeZones |= Boolean.parseBoolean(prop.getValue()); - } else { - // todo maybe just -D is better than -J-D in this case - if (prop.getValue() == null) { - command.add("-J-D" + prop.getKey()); - } else { - command.add("-J-D" + prop.getKey() + "=" + prop.getValue()); - } - } + if (nativeConfig.cleanupServer && !graalVMVersion.isMandrel()) { + buildRunner.cleanupServer(outputDir.toFile()); } - command.add("-J-Duser.language=" + System.getProperty("user.language")); - // Native image runtime uses the host's (i.e. build time) value of file.encoding - // system property. We intentionally default this to UTF-8 to avoid platform specific - // defaults to be picked up which can then result in inconsistent behaviour in the - // generated native application - command.add("-J-Dfile.encoding=UTF-8"); - if (enableSslNative) { - nativeConfig.enableHttpsUrlHandler = true; - nativeConfig.enableAllSecurityServices = true; - } - - handleAdditionalProperties(nativeConfig, command, isContainerBuild, outputDir); - nativeConfig.resources.includes.ifPresent(l -> l.stream() - .map(GlobUtil::toRegexPattern) - .map(re -> "-H:IncludeResources=" + re.trim()) - .forEach(command::add)); - command.add("--initialize-at-build-time="); - command.add("-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime"); //the default collection policy results in full GC's 50% of the time - command.add("-H:+JNI"); - command.add("-jar"); - command.add(runnerJarName); + NativeImageInvokerInfo commandAndExecutable = new NativeImageInvokerInfo.Builder() + .setNativeConfig(nativeConfig) + .setOutputTargetBuildItem(outputTargetBuildItem) + .setNativeImageProperties(nativeImageProperties) + .setOutputDir(outputDir) + .setRunnerJarName(runnerJarName) + .setNativeImageName(nativeImageName) + .setNoPIE(noPIE) + .setContainerBuild(isContainerBuild) + .setGraalVMVersion(graalVMVersion) + .build(); - if (nativeConfig.enableFallbackImages) { - command.add("-H:FallbackThreshold=5"); - } else { - //Default: be strict as those fallback images aren't very useful - //and tend to cover up real problems. 
- command.add("-H:FallbackThreshold=0"); - } - - if (nativeConfig.reportErrorsAtRuntime) { - command.add("-H:+ReportUnsupportedElementsAtRuntime"); - } - if (nativeConfig.reportExceptionStackTraces) { - command.add("-H:+ReportExceptionStackTraces"); - } - if (nativeConfig.debug.enabled) { - command.add("-H:GenerateDebugInfo=1"); - } - if (nativeConfig.debugBuildProcess) { - command.add("-J-Xrunjdwp:transport=dt_socket,address=" + DEBUG_BUILD_PROCESS_PORT + ",server=y,suspend=y"); - } - if (nativeConfig.enableReports) { - command.add("-H:+PrintAnalysisCallTree"); - } - if (nativeConfig.dumpProxies) { - command.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true"); - if (nativeConfig.enableServer) { - log.warn( - "Options dumpProxies and enableServer are both enabled: this will get the proxies dumped in an unknown external working directory"); - } - } - if (nativeConfig.nativeImageXmx.isPresent()) { - command.add("-J-Xmx" + nativeConfig.nativeImageXmx.get()); - } - List protocols = new ArrayList<>(2); - if (nativeConfig.enableHttpUrlHandler) { - protocols.add("http"); - } - if (nativeConfig.enableHttpsUrlHandler) { - protocols.add("https"); - } - if (nativeConfig.addAllCharsets) { - command.add("-H:+AddAllCharsets"); - } else { - command.add("-H:-AddAllCharsets"); - } - //if 'includeAllTimeZones' is set, don't request it explicitly as native-image will log a warning about this being now the default. - //(But still disable it when necessary) - if (!nativeConfig.includeAllTimeZones) { - command.add("-H:-IncludeAllTimeZones"); - } - if (!protocols.isEmpty()) { - command.add("-H:EnableURLProtocols=" + String.join(",", protocols)); - } - if (nativeConfig.enableAllSecurityServices) { - command.add("--enable-all-security-services"); - } - if (!noPIE.isEmpty()) { - command.add("-H:NativeLinkerOption=" + noPIE); - } + List nativeImageArgs = commandAndExecutable.args; - if (!nativeConfig.enableIsolates) { - command.add("-H:-SpawnIsolates"); - } - if (!nativeConfig.enableJni) { - log.warn("Your application is setting the deprecated 'quarkus.native.enable-jni' configuration key to false." 
- + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" - + " will be removed in a future Quarkus version."); - } - if (!nativeConfig.enableServer && !SystemUtils.IS_OS_WINDOWS) { - command.add("--no-server"); - } - if (nativeConfig.enableVmInspection) { - command.add("-H:+AllowVMInspection"); - } - if (nativeConfig.autoServiceLoaderRegistration) { - command.add("-H:+UseServiceLoaderFeature"); - //When enabling, at least print what exactly is being added: - command.add("-H:+TraceServiceLoaderFeature"); - } else { - command.add("-H:-UseServiceLoaderFeature"); - } - if (nativeConfig.fullStackTraces) { - command.add("-H:+StackTrace"); - } else { - command.add("-H:-StackTrace"); - } - String executableName = outputTargetBuildItem.getBaseName() + packageConfig.runnerSuffix; - command.add(executableName); - - log.info(String.join(" ", command)); - CountDownLatch errorReportLatch = new CountDownLatch(1); - final ProcessBuilder processBuilder = new ProcessBuilder(command).directory(outputDir.toFile()); - final Process process = ProcessUtil.launchProcessStreamStdOut(processBuilder, processInheritIODisabled); - ExecutorService executor = Executors.newSingleThreadExecutor(); - executor.submit(new ErrorReplacingProcessReader(process.getErrorStream(), outputDir.resolve("reports").toFile(), - errorReportLatch)); - executor.shutdown(); - errorReportLatch.await(); - int exitCode = process.waitFor(); + int exitCode = buildRunner.build(nativeImageArgs, outputDir, processInheritIODisabled.isPresent()); if (exitCode != 0) { - throw imageGenerationFailed(exitCode, command); + throw imageGenerationFailed(exitCode, nativeImageArgs); } - if (SystemUtils.IS_OS_WINDOWS && !(isContainerBuild)) { - //once image is generated it gets added .exe on Windows - executableName = executableName + ".exe"; + Path generatedExecutablePath = outputDir.resolve(resultingExecutableName); + Path finalExecutablePath = outputTargetBuildItem.getOutputDirectory().resolve(resultingExecutableName); + IoUtils.copy(generatedExecutablePath, finalExecutablePath); + Files.delete(generatedExecutablePath); + if (nativeConfig.debug.enabled) { + final String sources = "sources"; + final Path generatedSources = outputDir.resolve(sources); + final Path finalSources = outputTargetBuildItem.getOutputDirectory().resolve(sources); + IoUtils.copy(generatedSources, finalSources); + IoUtils.recursiveDelete(generatedSources); } - Path generatedImage = outputDir.resolve(executableName); - Path finalPath = outputTargetBuildItem.getOutputDirectory().resolve(executableName); - IoUtils.copy(generatedImage, finalPath); - Files.delete(generatedImage); - System.setProperty("native.image.path", finalPath.toAbsolutePath().toString()); + System.setProperty("native.image.path", finalExecutablePath.toAbsolutePath().toString()); - if (objcopyExists(env)) { + if (objcopyExists()) { if (nativeConfig.debug.enabled) { - splitDebugSymbols(finalPath); + splitDebugSymbols(finalExecutablePath); } // Strip debug symbols regardless, because the underlying JDK might contain them - objcopy("--strip-debug", finalPath.toString()); + objcopy("--strip-debug", finalExecutablePath.toString()); + } else { + log.warn("objcopy executable not found in PATH. 
Debug symbols will not be separated from executable."); + log.warn("That will result in a larger native image with debug symbols embedded in it."); } - return new NativeImageBuildItem(finalPath); + return new NativeImageBuildItem(finalExecutablePath); } catch (Exception e) { throw new RuntimeException("Failed to build native image", e); + } finally { + if (nativeConfig.debug.enabled) { + removeJarSourcesFromLib(outputTargetBuildItem); + IoUtils.recursiveDelete(outputDir.resolve(Paths.get(APP_SOURCES))); + } } } - private void handleAdditionalProperties(NativeConfig nativeConfig, List command, boolean isContainerBuild, - Path outputDir) { - if (nativeConfig.additionalBuildArgs.isPresent()) { - List strings = nativeConfig.additionalBuildArgs.get(); - for (String buildArg : strings) { - String trimmedBuildArg = buildArg.trim(); - if (trimmedBuildArg.contains(TRUST_STORE_SYSTEM_PROPERTY_MARKER) && isContainerBuild) { - /* - * When the native binary is being built with a docker container, because a volume is created, - * we need to copy the trustStore file into the output directory (which is the root of volume) - * and change the value of 'javax.net.ssl.trustStore' property to point to this value - * - * TODO: we might want to introduce a dedicated property in order to overcome this ugliness - */ - int index = trimmedBuildArg.indexOf(TRUST_STORE_SYSTEM_PROPERTY_MARKER); - if (trimmedBuildArg.length() > index + 2) { - String configuredTrustStorePath = trimmedBuildArg - .substring(index + TRUST_STORE_SYSTEM_PROPERTY_MARKER.length()); - try { - IoUtils.copy(Paths.get(configuredTrustStorePath), outputDir.resolve(MOVED_TRUST_STORE_NAME)); - command.add(trimmedBuildArg.substring(0, index) + TRUST_STORE_SYSTEM_PROPERTY_MARKER - + CONTAINER_BUILD_VOLUME_PATH + "/" + MOVED_TRUST_STORE_NAME); - } catch (IOException e) { - throw new UncheckedIOException("Unable to copy trustStore file '" + configuredTrustStorePath - + "' to volume root directory '" + outputDir.toAbsolutePath().toString() + "'", e); + private String getNativeImageName(OutputTargetBuildItem outputTargetBuildItem, PackageConfig packageConfig) { + return outputTargetBuildItem.getBaseName() + packageConfig.runnerSuffix; + } + + private String getResultingExecutableName(String nativeImageName, boolean isContainerBuild) { + String resultingExecutableName = nativeImageName; + if (SystemUtils.IS_OS_WINDOWS && !isContainerBuild) { + //once image is generated it gets added .exe on Windows + resultingExecutableName = resultingExecutableName + ".exe"; + } + return resultingExecutableName; + } + + public static boolean isContainerBuild(NativeConfig nativeConfig) { + return nativeConfig.containerRuntime.isPresent() || nativeConfig.containerBuild || nativeConfig.remoteContainerBuild; + } + + private static NativeImageBuildRunner getNativeImageBuildRunner(NativeConfig nativeConfig, Path outputDir, + String resultingExecutableName) { + if (!isContainerBuild(nativeConfig)) { + NativeImageBuildLocalRunner localRunner = getNativeImageBuildLocalRunner(nativeConfig); + if (localRunner != null) { + return localRunner; + } + String executableName = getNativeImageExecutableName(); + String errorMessage = "Cannot find the `" + executableName + + "` in the GRAALVM_HOME, JAVA_HOME and System PATH. 
Install it using `gu install native-image`"; + if (!SystemUtils.IS_OS_LINUX) { + throw new RuntimeException(errorMessage); + } + log.warn(errorMessage + " Attempting to fall back to container build."); + } + if (nativeConfig.remoteContainerBuild) { + return new NativeImageBuildRemoteContainerRunner(nativeConfig, outputDir, resultingExecutableName); + } + return new NativeImageBuildLocalContainerRunner(nativeConfig, outputDir); + } + + private void copyJarSourcesToLib(OutputTargetBuildItem outputTargetBuildItem, + CurateOutcomeBuildItem curateOutcomeBuildItem) { + Path targetDirectory = outputTargetBuildItem.getOutputDirectory() + .resolve(outputTargetBuildItem.getBaseName() + "-native-image-source-jar"); + Path libDir = targetDirectory.resolve(JarResultBuildStep.LIB); + File libDirFile = libDir.toFile(); + if (!libDirFile.exists()) { + libDirFile.mkdirs(); + } + + final List appDeps = curateOutcomeBuildItem.getEffectiveModel().getUserDependencies(); + for (AppDependency appDep : appDeps) { + final AppArtifact depArtifact = appDep.getArtifact(); + if (depArtifact.getType().equals("jar")) { + for (Path resolvedDep : depArtifact.getPaths()) { + if (!Files.isDirectory(resolvedDep)) { + // Do we need to handle transformed classes? + // Their bytecode might have been modified but is there source for such modification? + final Path jarSourceDep = toJarSource(resolvedDep); + if (jarSourceDep.toFile().exists()) { + final String fileName = depArtifact.getGroupId() + "." + jarSourceDep.getFileName(); + final Path targetPath = libDir.resolve(fileName); + try { + Files.copy(jarSourceDep, targetPath, StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + throw new RuntimeException("Unable to copy from " + jarSourceDep + " to " + targetPath, e); + } } } - } else { - command.add(trimmedBuildArg); } } } } + private static Path toJarSource(Path path) { + final Path parent = path.getParent(); + final String fileName = path.getFileName().toString(); + final int extensionIndex = fileName.lastIndexOf('.'); + final String sourcesFileName = String.format("%s-sources.jar", fileName.substring(0, extensionIndex)); + return parent.resolve(sourcesFileName); + } + + private void removeJarSourcesFromLib(OutputTargetBuildItem outputTargetBuildItem) { + Path targetDirectory = outputTargetBuildItem.getOutputDirectory() + .resolve(outputTargetBuildItem.getBaseName() + "-native-image-source-jar"); + Path libDir = targetDirectory.resolve(JarResultBuildStep.LIB); + + final File[] jarSources = libDir.toFile() + .listFiles((file, name) -> name.endsWith("-sources.jar")); + Stream.of(Objects.requireNonNull(jarSources)).forEach(File::delete); + } + + private static void copySourcesToSourceCache(OutputTargetBuildItem outputTargetBuildItem) { + Path targetDirectory = outputTargetBuildItem.getOutputDirectory() + .resolve(outputTargetBuildItem.getBaseName() + "-native-image-source-jar"); + + final Path targetSrc = targetDirectory.resolve(Paths.get(APP_SOURCES)); + final File targetSrcFile = targetSrc.toFile(); + if (!targetSrcFile.exists()) { + targetSrcFile.mkdirs(); + } + + final Path javaSourcesPath = outputTargetBuildItem.getOutputDirectory().resolve( + Paths.get("..", "src", "main", "java")); + + if (Files.exists(javaSourcesPath)) { + try (Stream paths = Files.walk(javaSourcesPath)) { + paths.forEach(path -> { + Path targetPath = Paths.get(targetSrc.toString(), + path.toString().substring(javaSourcesPath.toString().length())); + try { + Files.copy(path, targetPath, StandardCopyOption.REPLACE_EXISTING); + } catch 
(IOException e) { + throw new UncheckedIOException("Unable to copy from " + path + " to " + targetPath, e); + } + }); + } catch (IOException e) { + throw new UncheckedIOException("Unable to walk path " + javaSourcesPath, e); + } + } + } + private RuntimeException imageGenerationFailed(int exitValue, List command) { if (exitValue == OOM_ERROR_VALUE) { if (command.contains("docker") && !SystemUtils.IS_OS_LINUX) { @@ -422,96 +351,67 @@ private RuntimeException imageGenerationFailed(int exitValue, List comma } } - private void checkGraalVMVersion(String version) { - log.info("Running Quarkus native-image plugin on " + version); - final List obsoleteGraalVmVersions = Arrays.asList("1.0.0", "19.0.", "19.1.", "19.2.", "19.3.", "20.0."); - final boolean vmVersionIsObsolete = obsoleteGraalVmVersions.stream().anyMatch(v -> version.contains(" " + v)); - if (vmVersionIsObsolete) { - throw new IllegalStateException("Out of date version of GraalVM detected: " + version + "." - + " Quarkus currently supports 20.1.0. Please upgrade GraalVM to this version."); + private void checkGraalVMVersion(GraalVM.Version version) { + log.info("Running Quarkus native-image plugin on " + version.getFullVersion()); + if (version.isObsolete()) { + final int major = GraalVM.Version.CURRENT.major; + final int minor = GraalVM.Version.CURRENT.minor; + throw new IllegalStateException("Out of date version of GraalVM detected: " + version.getFullVersion() + "." + + " Quarkus currently supports " + major + "." + minor + ". Please upgrade GraalVM to this version."); } } - private static File getNativeImageExecutable(Optional graalVmHome, File javaHome, Map env) { - String imageName = SystemUtils.IS_OS_WINDOWS ? "native-image.cmd" : "native-image"; - if (graalVmHome.isPresent()) { - File file = Paths.get(graalVmHome.get(), "bin", imageName).toFile(); + private static NativeImageBuildLocalRunner getNativeImageBuildLocalRunner(NativeConfig nativeConfig) { + String executableName = getNativeImageExecutableName(); + if (nativeConfig.graalvmHome.isPresent()) { + File file = Paths.get(nativeConfig.graalvmHome.get(), "bin", executableName).toFile(); if (file.exists()) { - return file; + return new NativeImageBuildLocalRunner(file.getAbsolutePath()); + } + } + + File javaHome = nativeConfig.javaHome; + if (javaHome == null) { + // try system property first - it will be the JAVA_HOME used by the current JVM + String home = System.getProperty(JAVA_HOME_SYS); + if (home == null) { + // No luck, somewhat a odd JVM not enforcing this property + // try with the JAVA_HOME environment variable + home = System.getenv(JAVA_HOME_ENV); + } + + if (home != null) { + javaHome = new File(home); } } if (javaHome != null) { - File file = new File(javaHome, "bin/" + imageName); + File file = new File(javaHome, "bin/" + executableName); if (file.exists()) { - return file; + return new NativeImageBuildLocalRunner(file.getAbsolutePath()); } } // System path - String systemPath = env.get(PATH); + String systemPath = System.getenv(PATH); if (systemPath != null) { String[] pathDirs = systemPath.split(File.pathSeparator); for (String pathDir : pathDirs) { File dir = new File(pathDir); if (dir.isDirectory()) { - File file = new File(dir, imageName); + File file = new File(dir, executableName); if (file.exists()) { - return file; + return new NativeImageBuildLocalRunner(file.getAbsolutePath()); } } } } - throw new RuntimeException("Cannot find the `" + imageName + "` in the GRAALVM_HOME, JAVA_HOME and System " + - "PATH. 
Install it using `gu install native-image`"); - - } - - private static String getLinuxID(String option) { - Process process; - - try { - StringBuilder responseBuilder = new StringBuilder(); - String line; - - ProcessBuilder idPB = new ProcessBuilder().command("id", option); - idPB.redirectError(new File("/dev/null")); - idPB.redirectInput(new File("/dev/null")); - - process = idPB.start(); - try (InputStream inputStream = process.getInputStream()) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { - while ((line = reader.readLine()) != null) { - responseBuilder.append(line); - } - safeWaitFor(process); - return responseBuilder.toString(); - } - } catch (Throwable t) { - safeWaitFor(process); - throw t; - } - } catch (IOException e) { //from process.start() - //swallow and return null id - return null; - } + return null; } - static void safeWaitFor(Process process) { - boolean intr = false; - try { - for (;;) - try { - process.waitFor(); - return; - } catch (InterruptedException ex) { - intr = true; - } - } finally { - if (intr) - Thread.currentThread().interrupt(); - } + private static String getNativeImageExecutableName() { + return SystemUtils.IS_OS_WINDOWS ? "native-image.cmd" : "native-image"; } private static String detectNoPIE() { @@ -535,9 +435,9 @@ private static String testGCCArgument(String argument) { return ""; } - private boolean objcopyExists(Map env) { + private boolean objcopyExists() { // System path - String systemPath = env.get(PATH); + String systemPath = System.getenv(PATH); if (systemPath != null) { String[] pathDirs = systemPath.split(File.pathSeparator); for (String pathDir : pathDirs) { @@ -551,7 +451,6 @@ private boolean objcopyExists(Map env) { } } - log.debug("Cannot find executable (objcopy) to separate symbols from executable."); return false; } @@ -565,7 +464,9 @@ private static void objcopy(String... args) { final List command = new ArrayList<>(args.length + 1); command.add("objcopy"); command.addAll(Arrays.asList(args)); - log.infof("Execute %s", command); + if (log.isDebugEnabled()) { + log.debugf("Execute %s", String.join(" ", command)); + } Process process = null; try { process = new ProcessBuilder(command).start(); @@ -578,4 +479,367 @@ private static void objcopy(String... 
args) { } } } + + protected static final class GraalVM { + static final class Version implements Comparable { + private static final Pattern PATTERN = Pattern.compile( + "GraalVM Version (([1-9][0-9]*)\\.([0-9]+)\\.[0-9]+|\\p{XDigit}*)[^(\n$]*(\\(Mandrel Distribution\\))?\\s*"); + + static final Version UNVERSIONED = new Version("Undefined", -1, -1, Distribution.ORACLE); + static final Version SNAPSHOT_ORACLE = new Version("Snapshot", Integer.MAX_VALUE, Integer.MAX_VALUE, + Distribution.ORACLE); + static final Version SNAPSHOT_MANDREL = new Version("Snapshot", Integer.MAX_VALUE, Integer.MAX_VALUE, + Distribution.MANDREL); + + static final Version VERSION_20_3 = new Version("GraalVM 20.3", 20, 3, Distribution.ORACLE); + static final Version VERSION_21_0 = new Version("GraalVM 21.0", 21, 0, Distribution.ORACLE); + + static final Version MINIMUM = VERSION_20_3; + static final Version CURRENT = VERSION_21_0; + + final String fullVersion; + final int major; + final int minor; + final Distribution distribution; + + Version(String fullVersion, int major, int minor, Distribution distro) { + this.fullVersion = fullVersion; + this.major = major; + this.minor = minor; + this.distribution = distro; + } + + String getFullVersion() { + return fullVersion; + } + + boolean isDetected() { + return this != UNVERSIONED; + } + + boolean isObsolete() { + return this.compareTo(MINIMUM) < 0; + } + + boolean isMandrel() { + return distribution == Distribution.MANDREL; + } + + boolean isSnapshot() { + return this == SNAPSHOT_ORACLE || this == SNAPSHOT_MANDREL; + } + + boolean isNewerThan(Version version) { + return this.compareTo(version) > 0; + } + + @Override + public int compareTo(Version o) { + if (major > o.major) { + return 1; + } + + if (major == o.major) { + if (minor > o.minor) { + return 1; + } else if (minor == o.minor) { + return 0; + } + } + + return -1; + } + + static Version of(Stream lines) { + final Iterator it = lines.iterator(); + while (it.hasNext()) { + final String line = it.next(); + final Matcher matcher = PATTERN.matcher(line); + if (matcher.find() && matcher.groupCount() >= 3) { + final String distro = matcher.group(4); + if (isSnapshot(matcher.group(2))) { + return isMandrel(distro) ? SNAPSHOT_MANDREL : SNAPSHOT_ORACLE; + } else { + return new Version( + line, + Integer.parseInt(matcher.group(2)), Integer.parseInt(matcher.group(3)), + isMandrel(distro) ? 
Distribution.MANDREL : Distribution.ORACLE); + } + } + } + + return UNVERSIONED; + } + + private static boolean isSnapshot(String s) { + return s == null; + } + + private static boolean isMandrel(String s) { + return "(Mandrel Distribution)".equals(s); + } + + @Override + public String toString() { + return "Version{" + + "major=" + major + + ", minor=" + minor + + ", distribution=" + distribution + + '}'; + } + } + + enum Distribution { + ORACLE, + MANDREL; + } + } + + private static class NativeImageInvokerInfo { + private final List args; + + private NativeImageInvokerInfo(List args) { + this.args = args; + } + + List getArgs() { + return args; + } + + static class Builder { + private NativeConfig nativeConfig; + private OutputTargetBuildItem outputTargetBuildItem; + private List nativeImageProperties; + private Path outputDir; + private String runnerJarName; + private String noPIE = ""; + private boolean isContainerBuild = false; + private GraalVM.Version graalVMVersion = GraalVM.Version.UNVERSIONED; + private String nativeImageName; + + public Builder setNativeConfig(NativeConfig nativeConfig) { + this.nativeConfig = nativeConfig; + return this; + } + + public Builder setOutputTargetBuildItem(OutputTargetBuildItem outputTargetBuildItem) { + this.outputTargetBuildItem = outputTargetBuildItem; + return this; + } + + public Builder setNativeImageProperties(List nativeImageProperties) { + this.nativeImageProperties = nativeImageProperties; + return this; + } + + public Builder setOutputDir(Path outputDir) { + this.outputDir = outputDir; + return this; + } + + public Builder setRunnerJarName(String runnerJarName) { + this.runnerJarName = runnerJarName; + return this; + } + + public Builder setNoPIE(String noPIE) { + this.noPIE = noPIE; + return this; + } + + public Builder setContainerBuild(boolean containerBuild) { + isContainerBuild = containerBuild; + return this; + } + + public Builder setGraalVMVersion(GraalVM.Version graalVMVersion) { + this.graalVMVersion = graalVMVersion; + return this; + } + + public Builder setNativeImageName(String nativeImageName) { + this.nativeImageName = nativeImageName; + return this; + } + + public NativeImageInvokerInfo build() { + List nativeImageArgs = new ArrayList<>(); + boolean enableSslNative = false; + boolean enableAllSecurityServices = nativeConfig.enableAllSecurityServices; + boolean addAllCharsets = nativeConfig.addAllCharsets; + boolean enableHttpsUrlHandler = nativeConfig.enableHttpsUrlHandler; + for (NativeImageSystemPropertyBuildItem prop : nativeImageProperties) { + //todo: this should be specific build items + if (prop.getKey().equals("quarkus.ssl.native") && prop.getValue() != null) { + enableSslNative = Boolean.parseBoolean(prop.getValue()); + } else if (prop.getKey().equals("quarkus.jni.enable") && prop.getValue().equals("false")) { + log.warn("Your application is setting the deprecated 'quarkus.jni.enable' configuration key to false." 
+ + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" + + " will be removed in a future Quarkus version."); + } else if (prop.getKey().equals("quarkus.native.enable-all-security-services") && prop.getValue() != null) { + enableAllSecurityServices |= Boolean.parseBoolean(prop.getValue()); + } else if (prop.getKey().equals("quarkus.native.enable-all-charsets") && prop.getValue() != null) { + addAllCharsets |= Boolean.parseBoolean(prop.getValue()); + } else { + // todo maybe just -D is better than -J-D in this case + if (prop.getValue() == null) { + nativeImageArgs.add("-J-D" + prop.getKey()); + } else { + nativeImageArgs.add("-J-D" + prop.getKey() + "=" + prop.getValue()); + } + } + } + if (nativeConfig.userLanguage.isPresent()) { + nativeImageArgs.add("-J-Duser.language=" + nativeConfig.userLanguage.get()); + } + if (nativeConfig.userCountry.isPresent()) { + nativeImageArgs.add("-J-Duser.country=" + nativeConfig.userCountry.get()); + } + nativeImageArgs.add("-J-Dfile.encoding=" + nativeConfig.fileEncoding); + + if (enableSslNative) { + enableHttpsUrlHandler = true; + enableAllSecurityServices = true; + } + + handleAdditionalProperties(nativeConfig, nativeImageArgs, isContainerBuild, outputDir); + nativeImageArgs.add("--initialize-at-build-time="); + nativeImageArgs.add( + "-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime"); //the default collection policy results in full GC's 50% of the time + nativeImageArgs.add("-H:+JNI"); + nativeImageArgs.add("-H:+AllowFoldMethods"); + nativeImageArgs.add("-jar"); + nativeImageArgs.add(runnerJarName); + + if (nativeConfig.enableFallbackImages) { + nativeImageArgs.add("-H:FallbackThreshold=5"); + } else { + //Default: be strict as those fallback images aren't very useful + //and tend to cover up real problems. 
+ nativeImageArgs.add("-H:FallbackThreshold=0"); + } + + if (nativeConfig.reportErrorsAtRuntime) { + nativeImageArgs.add("-H:+ReportUnsupportedElementsAtRuntime"); + } + if (nativeConfig.reportExceptionStackTraces) { + nativeImageArgs.add("-H:+ReportExceptionStackTraces"); + } + if (nativeConfig.debug.enabled) { + nativeImageArgs.add("-g"); + nativeImageArgs.add("-H:DebugInfoSourceSearchPath=" + APP_SOURCES); + } + if (nativeConfig.debugBuildProcess) { + nativeImageArgs + .add("-J-Xrunjdwp:transport=dt_socket,address=" + DEBUG_BUILD_PROCESS_PORT + ",server=y,suspend=y"); + } + if (nativeConfig.enableReports) { + nativeImageArgs.add("-H:+PrintAnalysisCallTree"); + } + if (nativeConfig.dumpProxies) { + nativeImageArgs.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true"); + if (nativeConfig.enableServer) { + log.warn( + "Options dumpProxies and enableServer are both enabled: this will get the proxies dumped in an unknown external working directory"); + } + } + if (nativeConfig.nativeImageXmx.isPresent()) { + nativeImageArgs.add("-J-Xmx" + nativeConfig.nativeImageXmx.get()); + } + List protocols = new ArrayList<>(2); + if (nativeConfig.enableHttpUrlHandler) { + protocols.add("http"); + } + if (enableHttpsUrlHandler) { + protocols.add("https"); + } + if (addAllCharsets) { + nativeImageArgs.add("-H:+AddAllCharsets"); + } else { + nativeImageArgs.add("-H:-AddAllCharsets"); + } + if (!protocols.isEmpty()) { + nativeImageArgs.add("-H:EnableURLProtocols=" + String.join(",", protocols)); + } + if (enableAllSecurityServices) { + nativeImageArgs.add("--enable-all-security-services"); + } + if (!noPIE.isEmpty()) { + nativeImageArgs.add("-H:NativeLinkerOption=" + noPIE); + } + + if (!nativeConfig.enableIsolates) { + nativeImageArgs.add("-H:-SpawnIsolates"); + } + if (!nativeConfig.enableJni) { + log.warn( + "Your application is setting the deprecated 'quarkus.native.enable-jni' configuration key to false." 
+ + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" + + " will be removed in a future Quarkus version."); + } + if (!nativeConfig.enableServer && !SystemUtils.IS_OS_WINDOWS && !graalVMVersion.isMandrel()) { + nativeImageArgs.add("--no-server"); + } + if (nativeConfig.enableVmInspection) { + nativeImageArgs.add("-H:+AllowVMInspection"); + } + if (nativeConfig.autoServiceLoaderRegistration) { + nativeImageArgs.add("-H:+UseServiceLoaderFeature"); + //When enabling, at least print what exactly is being added: + nativeImageArgs.add("-H:+TraceServiceLoaderFeature"); + } else { + nativeImageArgs.add("-H:-UseServiceLoaderFeature"); + } + if (nativeConfig.fullStackTraces) { + nativeImageArgs.add("-H:+StackTrace"); + } else { + nativeImageArgs.add("-H:-StackTrace"); + } + + if (nativeConfig.enableDashboardDump) { + nativeImageArgs.add("-H:DashboardDump=" + outputTargetBuildItem.getBaseName() + "_dashboard.dump"); + nativeImageArgs.add("-H:+DashboardAll"); + } + + nativeImageArgs.add(nativeImageName); + + return new NativeImageInvokerInfo(nativeImageArgs); + } + + private void handleAdditionalProperties(NativeConfig nativeConfig, List command, boolean isContainerBuild, + Path outputDir) { + if (nativeConfig.additionalBuildArgs.isPresent()) { + List strings = nativeConfig.additionalBuildArgs.get(); + for (String buildArg : strings) { + String trimmedBuildArg = buildArg.trim(); + if (trimmedBuildArg.contains(TRUST_STORE_SYSTEM_PROPERTY_MARKER) && isContainerBuild) { + /* + * When the native binary is being built with a docker container, because a volume is created, + * we need to copy the trustStore file into the output directory (which is the root of volume) + * and change the value of 'javax.net.ssl.trustStore' property to point to this value + * + * TODO: we might want to introduce a dedicated property in order to overcome this ugliness + */ + int index = trimmedBuildArg.indexOf(TRUST_STORE_SYSTEM_PROPERTY_MARKER); + if (trimmedBuildArg.length() > index + 2) { + String configuredTrustStorePath = trimmedBuildArg + .substring(index + TRUST_STORE_SYSTEM_PROPERTY_MARKER.length()); + try { + IoUtils.copy(Paths.get(configuredTrustStorePath), + outputDir.resolve(MOVED_TRUST_STORE_NAME)); + command.add(trimmedBuildArg.substring(0, index) + TRUST_STORE_SYSTEM_PROPERTY_MARKER + + CONTAINER_BUILD_VOLUME_PATH + "/" + MOVED_TRUST_STORE_NAME); + } catch (IOException e) { + throw new UncheckedIOException("Unable to copy trustStore file '" + configuredTrustStorePath + + "' to volume root directory '" + outputDir.toAbsolutePath().toString() + "'", e); + } + } + } else { + command.add(trimmedBuildArg); + } + } + } + } + } + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeOrNativeSourcesBuild.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeOrNativeSourcesBuild.java new file mode 100644 index 0000000000000..dfd4a191a531c --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeOrNativeSourcesBuild.java @@ -0,0 +1,24 @@ +package io.quarkus.deployment.pkg.steps; + +import java.util.function.BooleanSupplier; + +import io.quarkus.deployment.pkg.PackageConfig; + +/** + * Supplier that can be used to only run build steps in the + * native or native sources builds. 
+ */ +public class NativeOrNativeSourcesBuild implements BooleanSupplier { + + private final PackageConfig packageConfig; + + NativeOrNativeSourcesBuild(PackageConfig packageConfig) { + this.packageConfig = packageConfig; + } + + @Override + public boolean getAsBoolean() { + return packageConfig.type.equalsIgnoreCase(PackageConfig.NATIVE) + || packageConfig.type.equalsIgnoreCase(PackageConfig.NATIVE_SOURCES); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeSourcesBuild.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeSourcesBuild.java new file mode 100644 index 0000000000000..ecac3a5fb9f22 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeSourcesBuild.java @@ -0,0 +1,23 @@ +package io.quarkus.deployment.pkg.steps; + +import java.util.function.BooleanSupplier; + +import io.quarkus.deployment.pkg.PackageConfig; + +/** + * Supplier that can be used to only run build steps in the + * native sources build. + */ +public class NativeSourcesBuild implements BooleanSupplier { + + private final PackageConfig packageConfig; + + NativeSourcesBuild(PackageConfig packageConfig) { + this.packageConfig = packageConfig; + } + + @Override + public boolean getAsBoolean() { + return packageConfig.type.equalsIgnoreCase(PackageConfig.NATIVE_SOURCES); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/PackageTypeVerificationBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/PackageTypeVerificationBuildStep.java index d37ef4c42d026..d1df71dfe5814 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/PackageTypeVerificationBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/PackageTypeVerificationBuildStep.java @@ -11,15 +11,17 @@ import io.quarkus.deployment.pkg.builditem.PackageTypeBuildItem; /** - * verifies that the requested package type can actually be produced + * Verifies that the requested package type can actually be produced */ public class PackageTypeVerificationBuildStep { @BuildStep List builtins() { - return Arrays.asList(new PackageTypeBuildItem(PackageConfig.NATIVE), new PackageTypeBuildItem(PackageConfig.JAR), - new PackageTypeBuildItem(PackageConfig.LEGACY), new PackageTypeBuildItem(PackageConfig.UBER_JAR), - new PackageTypeBuildItem(PackageConfig.FAST_JAR), + return Arrays.asList(new PackageTypeBuildItem(PackageConfig.NATIVE), + new PackageTypeBuildItem(PackageConfig.NATIVE_SOURCES), + new PackageTypeBuildItem(PackageConfig.JAR), new PackageTypeBuildItem(PackageConfig.FAST_JAR), + new PackageTypeBuildItem(PackageConfig.LEGACY_JAR), + new PackageTypeBuildItem(PackageConfig.UBER_JAR), new PackageTypeBuildItem(PackageConfig.MUTABLE_JAR)); } @@ -27,7 +29,7 @@ List builtins() { ServiceStartBuildItem verify(List items, PackageConfig config) { Set registered = items.stream().map(PackageTypeBuildItem::getType).collect(Collectors.toSet()); if (!registered.contains(config.type)) { - throw new IllegalStateException("Unknown packaging type " + config.type + " known types are " + registered); + throw new IllegalStateException("Unknown packaging type '" + config.type + "' known types are " + registered); } return null; } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java b/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java index 5bb3925af80f1..aea47243a02fc 100644 --- 
a/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java @@ -13,6 +13,7 @@ import java.net.MalformedURLException; import java.net.URL; import java.time.Duration; +import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -62,7 +63,9 @@ import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.StartupContext; import io.quarkus.runtime.StartupTask; +import io.quarkus.runtime.annotations.IgnoreProperty; import io.quarkus.runtime.annotations.RecordableConstructor; +import io.quarkus.runtime.annotations.RelaxedValidation; /** * A class that can be used to record invocations to bytecode so they can be replayed later. This is done through the @@ -114,15 +117,15 @@ public class BytecodeRecorderImpl implements RecorderContext { private final Map, NonDefaultConstructorHolder> nonDefaultConstructors = new HashMap<>(); private final String className; - private final String buildStepName; - private final String methodName; + private final Function classCreatorFunction; + private final Function methodCreatorFunction; private final List loaders = new ArrayList<>(); /** * the maximum number of instruction groups that can be added to a method. This is to limit the size of the method * so that the 65k limit is not reached. - * + *

* This is fairly arbitrary, as there is no fixed size for the instruction groups, but in practice this limit * seems to be fairly reasonable */ @@ -132,22 +135,57 @@ public class BytecodeRecorderImpl implements RecorderContext { private boolean loadComplete; public BytecodeRecorderImpl(boolean staticInit, String buildStepName, String methodName, String uniqueHash) { - this(Thread.currentThread().getContextClassLoader(), staticInit, - BASE_PACKAGE + buildStepName + "$" + methodName + uniqueHash, buildStepName, methodName); + this( + Thread.currentThread().getContextClassLoader(), + staticInit, + toClassName(buildStepName, methodName, uniqueHash), + classOutput -> { + return startupTaskClassCreator(classOutput, toClassName(buildStepName, methodName, uniqueHash)); + }, + classCreator -> { + return startupMethodCreator(buildStepName, methodName, classCreator); + }); + } + + private static MethodCreator startupMethodCreator(String buildStepName, String methodName, ClassCreator classCreator) { + MethodCreator mainMethod = classCreator.getMethodCreator("deploy", void.class, StartupContext.class); + + // record the build step name + if ((buildStepName != null) && (methodName != null)) { + mainMethod.invokeVirtualMethod(ofMethod(StartupContext.class, "setCurrentBuildStepName", void.class, String.class), + mainMethod.getMethodParam(0), mainMethod.load(buildStepName + "." + methodName)); + } + return mainMethod; + } + + private static ClassCreator startupTaskClassCreator(ClassOutput classOutput, String className) { + return ClassCreator.builder().classOutput(classOutput).className(className).superClass(Object.class) + .interfaces(StartupTask.class).build(); + } + + private static String toClassName(String buildStepName, String methodName, String uniqueHash) { + return BASE_PACKAGE + buildStepName + "$" + methodName + uniqueHash; } // visible for testing BytecodeRecorderImpl(ClassLoader classLoader, boolean staticInit, String className) { - this(classLoader, staticInit, className, null, null); + this(classLoader, staticInit, className, + classOutput -> { + return startupTaskClassCreator(classOutput, className); + }, + classCreator -> { + return startupMethodCreator(null, null, classCreator); + }); } - private BytecodeRecorderImpl(ClassLoader classLoader, boolean staticInit, String className, String buildStepName, - String methodName) { + public BytecodeRecorderImpl(ClassLoader classLoader, boolean staticInit, String className, + Function classCreatorFunction, + Function methodCreatorFunction) { this.classLoader = classLoader; this.staticInit = staticInit; this.className = className; - this.buildStepName = buildStepName; - this.methodName = methodName; + this.classCreatorFunction = classCreatorFunction; + this.methodCreatorFunction = methodCreatorFunction; } public boolean isEmpty() { @@ -344,23 +382,23 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl + "() directly on an object returned from the bytecode recorder, you can only pass it back into the recorder as a parameter"); } }); - ProxyInstance instance = new ProxyInstance(proxyInstance, key); - return instance; + return new ProxyInstance(proxyInstance, key); } public String getClassName() { return className; } - public void writeBytecode(ClassOutput classOutput) { - ClassCreator file = ClassCreator.builder().classOutput(classOutput) - .className(className) - .superClass(Object.class).interfaces(StartupTask.class).build(); - MethodCreator mainMethod = file.getMethodCreator("deploy", void.class, 
StartupContext.class); + private Map.Entry prepareBytecodeWriting(ClassOutput classOutput) { + ClassCreator file = classCreatorFunction.apply(classOutput); + MethodCreator mainMethod = methodCreatorFunction.apply(file); + return new AbstractMap.SimpleEntry<>(file, mainMethod); + } - // record the build step name - mainMethod.invokeVirtualMethod(ofMethod(StartupContext.class, "setCurrentBuildStepName", void.class, String.class), - mainMethod.getMethodParam(0), mainMethod.load(buildStepName + "." + methodName)); + public void writeBytecode(ClassOutput classOutput) { + Map.Entry entry = prepareBytecodeWriting(classOutput); + ClassCreator file = entry.getKey(); + MethodCreator mainMethod = entry.getValue(); //now create instances of all the classes we invoke on and store them in variables as well Map classInstanceVariables = new HashMap<>(); @@ -407,7 +445,8 @@ ResultHandle createValue(MethodContext context, MethodCreator method, ResultHand //even if the code for an invocation is split over several methods for (int i = 0; i < call.parameters.length; ++i) { call.deferredParameters[i] = loadObjectInstance(call.parameters[i], parameterMap, - call.method.getParameterTypes()[i]); + call.method.getParameterTypes()[i], Arrays.stream(call.method.getParameterAnnotations()[i]) + .anyMatch(s -> s.annotationType() == RelaxedValidation.class)); } } catch (Exception e) { throw new RuntimeException("Failed to record call to method " + call.method, e); @@ -498,14 +537,15 @@ public void write(MethodContext context, MethodCreator method, ResultHandle arra /** * Returns a representation of a serialized parameter. */ - private DeferredParameter loadObjectInstance(Object param, Map existing, Class expectedType) { + private DeferredParameter loadObjectInstance(Object param, Map existing, Class expectedType, + boolean relaxedValidation) { if (loadComplete) { throw new RuntimeException("All parameters have already been loaded, it is too late to call loadObjectInstance"); } if (existing.containsKey(param)) { return existing.get(param); } - DeferredParameter ret = loadObjectInstanceImpl(param, existing, expectedType); + DeferredParameter ret = loadObjectInstanceImpl(param, existing, expectedType, relaxedValidation); existing.put(param, ret); return ret; } @@ -514,7 +554,7 @@ private DeferredParameter loadObjectInstance(Object param, Map existing, - Class expectedType) { + Class expectedType, boolean relaxedValidation) { //null is easy if (param == null) { return new DeferredParameter() { @@ -531,7 +571,7 @@ ResultHandle doLoad(MethodContext creator, MethodCreator method, ResultHandle ar } //Handle empty collections as returned by the Collections object - loadedObject = handleCollectionsObjects(param, existing); + loadedObject = handleCollectionsObjects(param, existing, relaxedValidation); if (loadedObject != null) { return loadedObject; } @@ -547,7 +587,7 @@ ResultHandle doLoad(MethodContext creator, MethodCreator method, ResultHandle ar try { ObjectSubstitution substitution = holder.sub.newInstance(); Object res = substitution.serialize(param); - DeferredParameter serialized = loadObjectInstance(res, existing, holder.to); + DeferredParameter serialized = loadObjectInstance(res, existing, holder.to, relaxedValidation); SubstitutionHolder finalHolder = holder; return new DeferredArrayStoreParameter() { @@ -573,7 +613,7 @@ ResultHandle createValue(MethodContext creator, MethodCreator method, ResultHand } else if (param instanceof Optional) { Optional val = (Optional) param; if (val.isPresent()) { - DeferredParameter res 
= loadObjectInstance(val.get(), existing, Object.class); + DeferredParameter res = loadObjectInstance(val.get(), existing, Object.class, relaxedValidation); return new DeferredArrayStoreParameter() { @Override @@ -584,7 +624,10 @@ void doPrepare(MethodContext context) { @Override ResultHandle createValue(MethodContext context, MethodCreator method, ResultHandle array) { - return method.invokeStaticMethod(ofMethod(Optional.class, "of", Optional.class, Object.class), + // If the value is a proxy, it may be non-null at build time but become null + // when we actually create the value during initialization; + // so we need to use 'ofNullable' and not 'of' here. + return method.invokeStaticMethod(ofMethod(Optional.class, "ofNullable", Optional.class, Object.class), context.loadDeferred(res)); } }; @@ -830,7 +873,7 @@ ResultHandle doLoad(MethodContext context, MethodCreator method, ResultHandle ar for (int i = 0; i < length; ++i) { DeferredParameter component = loadObjectInstance(Array.get(param, i), existing, - expectedType.getComponentType()); + expectedType.getComponentType(), relaxedValidation); components[i] = component; } return new DeferredArrayStoreParameter() { @@ -868,7 +911,7 @@ ResultHandle createValue(MethodContext context, MethodCreator method, ResultHand Object explicitValue = annotationProxy.getValues().get(valueMethod.name()); if (explicitValue != null) { constructorParamsHandles[iterator.previousIndex()] = loadObjectInstance(explicitValue, existing, - explicitValue.getClass()); + explicitValue.getClass(), relaxedValidation); } else { AnnotationValue value = annotationValues.get(valueMethod.name()); if (value == null) { @@ -877,7 +920,7 @@ ResultHandle createValue(MethodContext context, MethodCreator method, ResultHand Object defaultValue = annotationProxy.getDefaultValues().get(valueMethod.name()); if (defaultValue != null) { constructorParamsHandles[iterator.previousIndex()] = loadObjectInstance(defaultValue, existing, - defaultValue.getClass()); + defaultValue.getClass(), relaxedValidation); continue; } if (value == null) { @@ -910,7 +953,7 @@ ResultHandle createValue(MethodContext context, MethodCreator method, ResultHand }; } else { - return loadComplexObject(param, existing, expectedType); + return loadComplexObject(param, existing, expectedType, relaxedValidation); } } @@ -920,9 +963,11 @@ ResultHandle createValue(MethodContext context, MethodCreator method, ResultHand * * @param param The object to load * @param existing + * @param relaxedValidation * @return */ - private DeferredParameter handleCollectionsObjects(Object param, Map existing) { + private DeferredParameter handleCollectionsObjects(Object param, Map existing, + boolean relaxedValidation) { if (param instanceof Collection) { if (param.getClass().equals(Collections.emptyList().getClass())) { return new DeferredParameter() { @@ -955,7 +1000,8 @@ ResultHandle doLoad(MethodContext context, MethodCreator method, ResultHandle ar } }; } else if (param.getClass().equals(SINGLETON_LIST_CLASS)) { - DeferredParameter deferred = loadObjectInstance(((List) param).get(0), existing, Object.class); + DeferredParameter deferred = loadObjectInstance(((List) param).get(0), existing, Object.class, + relaxedValidation); return new DeferredParameter() { @Override void doPrepare(MethodContext context) { @@ -971,7 +1017,8 @@ ResultHandle doLoad(MethodContext context, MethodCreator method, ResultHandle ar } }; } else if (param.getClass().equals(SINGLETON_SET_CLASS)) { - DeferredParameter deferred = loadObjectInstance(((Set) 
param).iterator().next(), existing, Object.class); + DeferredParameter deferred = loadObjectInstance(((Set) param).iterator().next(), existing, Object.class, + relaxedValidation); return new DeferredParameter() { @Override void doPrepare(MethodContext context) { @@ -1014,8 +1061,8 @@ ResultHandle doLoad(MethodContext context, MethodCreator method, ResultHandle ar } else if (param.getClass().equals(SINGLETON_MAP_CLASS)) { Map.Entry entry = ((Map) param).entrySet().iterator().next(); - DeferredParameter key = loadObjectInstance(entry.getKey(), existing, Object.class); - DeferredParameter value = loadObjectInstance(entry.getValue(), existing, Object.class); + DeferredParameter key = loadObjectInstance(entry.getKey(), existing, Object.class, relaxedValidation); + DeferredParameter value = loadObjectInstance(entry.getValue(), existing, Object.class, relaxedValidation); return new DeferredParameter() { @Override void doPrepare(MethodContext context) { @@ -1042,26 +1089,30 @@ ResultHandle doLoad(MethodContext context, MethodCreator method, ResultHandle ar /** * Created a {@link DeferredParameter} to load a complex object, such as a javabean or collection. This is basically - * just an extension of {@link #loadObjectInstanceImpl(Object, Map, Class)} but it removes some of the more complex + * just an extension of {@link #loadObjectInstanceImpl(Object, Map, Class, boolean)} but it removes some of the more complex * code from that method. * * @param param The object to load * @param existing The existing object map * @param expectedType The expected type of the object + * @param relaxedValidation * @return */ private DeferredParameter loadComplexObject(Object param, Map existing, - Class expectedType) { + Class expectedType, boolean relaxedValidation) { //a list of steps that are performed on the object after it has been created //we need to create all these first, to ensure the required objects have already //been deserialized List setupSteps = new ArrayList<>(); List ctorSetupSteps = new ArrayList<>(); + boolean relaxedOk = false; if (param instanceof Collection) { //if this is a collection we want to serialize every element for (Object i : (Collection) param) { - DeferredParameter val = loadObjectInstance(i, existing, i.getClass()); + DeferredParameter val = i != null + ? loadObjectInstance(i, existing, i.getClass(), relaxedValidation) + : loadObjectInstance(null, existing, Object.class, relaxedValidation); setupSteps.add(new SerialzationStep() { @Override public void handle(MethodContext context, MethodCreator method, DeferredArrayStoreParameter out) { @@ -1076,14 +1127,15 @@ public void prepare(MethodContext context) { } }); } + relaxedOk = true; } if (param instanceof Map) { //map works the same as collection for (Map.Entry i : ((Map) param).entrySet()) { - DeferredParameter key = loadObjectInstance(i.getKey(), existing, i.getKey().getClass()); + DeferredParameter key = loadObjectInstance(i.getKey(), existing, i.getKey().getClass(), relaxedValidation); DeferredParameter val = i.getValue() != null - ? loadObjectInstance(i.getValue(), existing, i.getValue().getClass()) - : loadObjectInstance(null, existing, Object.class); + ? 
loadObjectInstance(i.getValue(), existing, i.getValue().getClass(), relaxedValidation) + : loadObjectInstance(null, existing, Object.class, relaxedValidation); setupSteps.add(new SerialzationStep() { @Override public void handle(MethodContext context, MethodCreator method, DeferredArrayStoreParameter out) { @@ -1098,6 +1150,7 @@ public void prepare(MethodContext context) { } }); } + relaxedOk = true; } //check how the object is constructed @@ -1119,7 +1172,7 @@ public void prepare(MethodContext context) { for (int i = 0; i < params.size(); i++) { Object obj = params.get(i); nonDefaultConstructorHandles[i] = loadObjectInstance(obj, existing, - nonDefaultConstructorHolder.constructor.getParameterTypes()[count++]); + nonDefaultConstructorHolder.constructor.getParameterTypes()[count++], relaxedValidation); } } else { for (Constructor ctor : param.getClass().getConstructors()) { @@ -1138,6 +1191,18 @@ public void prepare(MethodContext context) { Set handledProperties = new HashSet<>(); Property[] desc = PropertyUtils.getPropertyDescriptors(param); for (Property i : desc) { + // check if the getter is ignored + if ((i.getReadMethod() != null) && (i.getReadMethod().getAnnotation(IgnoreProperty.class) != null)) { + continue; + } + // check if the matching field is ignored + try { + if (param.getClass().getDeclaredField(i.getName()).getAnnotation(IgnoreProperty.class) != null) { + continue; + } + } catch (NoSuchFieldException ignored) { + + } Integer ctorParamIndex = constructorParamNameMap.remove(i.name); if (i.getReadMethod() != null && i.getWriteMethod() == null && ctorParamIndex == null) { try { @@ -1152,7 +1217,7 @@ public void prepare(MethodContext context) { List params = new ArrayList<>(); for (Object c : propertyValue) { - DeferredParameter toAdd = loadObjectInstance(c, existing, Object.class); + DeferredParameter toAdd = loadObjectInstance(c, existing, Object.class, relaxedValidation); params.add(toAdd); } @@ -1191,8 +1256,9 @@ public void prepare(MethodContext context) { Map def = new LinkedHashMap<>(); for (Map.Entry entry : propertyValue.entrySet()) { DeferredParameter key = loadObjectInstance(entry.getKey(), existing, - Object.class); - DeferredParameter val = loadObjectInstance(entry.getValue(), existing, Object.class); + Object.class, relaxedValidation); + DeferredParameter val = loadObjectInstance(entry.getValue(), existing, Object.class, + relaxedValidation); def.put(key, val); } setupSteps.add(new SerialzationStep() { @@ -1218,6 +1284,17 @@ public void prepare(MethodContext context) { } }); } + } else if (!relaxedValidation && !i.getName().equals("class") && !relaxedOk + && nonDefaultConstructorHolder == null) { + //check if there is actually a field with the name + try { + i.getReadMethod().getDeclaringClass().getDeclaredField(i.getName()); + throw new RuntimeException("Cannot serialise field '" + i.getName() + "' on object '" + param + + "' as the property is read only"); + } catch (NoSuchFieldException e) { + //if there is no underlying field then we ignore the property + } + } } catch (Exception e) { @@ -1236,23 +1313,28 @@ public void prepare(MethodContext context) { Class propertyType = i.getPropertyType(); if (ctorParamIndex == null && i.getReadMethod().getReturnType() != i.getWriteMethod().getParameterTypes()[0]) { - //this is a weird situation where the reader and writer are different types - //we iterate and try and find a valid setter method for the type we have - //OpenAPI does some weird stuff like this - - for (Method m : param.getClass().getMethods()) { - if 
(m.getName().equals(i.getWriteMethod().getName())) { - if (m.getParameterTypes().length > 0 - && m.getParameterTypes()[0].isAssignableFrom(param.getClass())) { - propertyType = m.getParameterTypes()[0]; - break; + if (relaxedValidation) { + //this is a weird situation where the reader and writer are different types + //we iterate and try and find a valid setter method for the type we have + //OpenAPI does some weird stuff like this + + for (Method m : param.getClass().getMethods()) { + if (m.getName().equals(i.getWriteMethod().getName())) { + if (m.getParameterTypes().length > 0 + && m.getParameterTypes()[0].isAssignableFrom(param.getClass())) { + propertyType = m.getParameterTypes()[0]; + break; + } } - } + } + } else { + throw new RuntimeException("Cannot serialise field " + i.getName() + " on object " + param + + " as setter and getters were different types"); } } DeferredParameter val = loadObjectInstance(propertyValue, existing, - i.getPropertyType()); + i.getPropertyType(), relaxedValidation); if (ctorParamIndex != null) { nonDefaultConstructorHandles[ctorParamIndex] = val; ctorSetupSteps.add(new SerialzationStep() { @@ -1293,13 +1375,18 @@ public void prepare(MethodContext context) { //now handle accessible fields for (Field field : param.getClass().getFields()) { + // check if the field is ignored + if (field.getAnnotation(IgnoreProperty.class) != null) { + continue; + } if (!handledProperties.contains(field.getName())) { Integer ctorParamIndex = constructorParamNameMap.remove(field.getName()); if ((ctorParamIndex != null || !Modifier.isFinal(field.getModifiers())) && !Modifier.isStatic(field.getModifiers())) { try { - DeferredParameter val = loadObjectInstance(field.get(param), existing, field.getType()); + DeferredParameter val = loadObjectInstance(field.get(param), existing, field.getType(), + relaxedValidation); if (ctorParamIndex != null) { nonDefaultConstructorHandles[ctorParamIndex] = val; ctorSetupSteps.add(new SerialzationStep() { @@ -1633,7 +1720,7 @@ static String componentType(MethodInfo method) { /** * A bytecode serialized value. This is an abstraction over ResultHandle, as ResultHandle * cannot span methods. - * + *

* Instances of DeferredParameter can be used in different methods */ abstract class DeferredParameter { @@ -1643,7 +1730,7 @@ abstract class DeferredParameter { /** * The function that is called to read the value for use. This may be by reading the value from the Object[] * array, or is may be a direct ldc instruction in the case of primitives. - * + *

* Code in this method is run in a single instruction group, so large objects should be serialized in the * {@link #doPrepare(MethodContext)} method instead *
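The javadoc above describes the two-phase contract behind DeferredParameter: a value is materialized once in doPrepare (typically stored into a shared array) and later read back in doLoad from whichever generated method needs it, because a ResultHandle cannot cross method boundaries. The following plain-Java sketch illustrates only that staging pattern; the class and member names are illustrative and are not part of the Quarkus recorder API.

import java.util.ArrayList;
import java.util.List;
import java.util.function.IntConsumer;

/**
 * Illustrative sketch only (not the Quarkus recorder API): values are "prepared" once into a
 * shared slot list, and any later step -- standing in for a separately generated method --
 * loads them by index. This mirrors the doPrepare/doLoad split described above.
 */
class DeferredValueDemo {
    private final List<Object> slots = new ArrayList<>();

    /** Phase 1: stage a value once and remember where it lives. */
    int prepare(Object value) {
        slots.add(value);
        return slots.size() - 1;
    }

    /** Phase 2: any later step can read the staged value by its index. */
    Object load(int index) {
        return slots.get(index);
    }

    public static void main(String[] args) {
        DeferredValueDemo demo = new DeferredValueDemo();
        int idx = demo.prepare("some large serialized value");
        // a "different method" (here just a lambda) loads the prepared value later
        IntConsumer laterStep = i -> System.out.println(demo.load(i));
        laterStep.accept(idx);
    }
}

In the recorder itself, the role of the slot list is played roughly by the Object[] array that DeferredArrayStoreParameter writes into during the prepare phase, as the surrounding javadoc notes.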

diff --git a/core/deployment/src/main/java/io/quarkus/deployment/recording/substitutions/AdditionalSubstitutionsBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/recording/substitutions/AdditionalSubstitutionsBuildStep.java new file mode 100644 index 0000000000000..c4182eac9a96d --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/recording/substitutions/AdditionalSubstitutionsBuildStep.java @@ -0,0 +1,34 @@ +package io.quarkus.deployment.recording.substitutions; + +import java.time.ZoneId; + +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.ObjectSubstitutionBuildItem; +import io.quarkus.runtime.recording.substitutions.ZoneIdSubstitution; + +public class AdditionalSubstitutionsBuildStep { + + @BuildStep + public void additionalSubstitutions(BuildProducer producer) { + zoneIdSubstitutions(producer); + } + + @SuppressWarnings("unchecked") + private void zoneIdSubstitutions(BuildProducer producer) { + try { + /* + * We can't refer to these classes as they are package private but we need a handle on them + * because the bytecode recorder needs to have the actual class registered and not a super class + */ + + Class zoneRegionClass = (Class) Class.forName("java.time.ZoneRegion"); + producer.produce(new ObjectSubstitutionBuildItem(zoneRegionClass, String.class, ZoneIdSubstitution.class)); + + Class zoneOffsetClass = (Class) Class.forName("java.time.ZoneOffset"); + producer.produce(new ObjectSubstitutionBuildItem(zoneOffsetClass, String.class, ZoneIdSubstitution.class)); + } catch (ClassNotFoundException e) { + throw new IllegalStateException("Improper registration of ZoneId substitution", e); + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/AdditionalClassLoaderResourcesBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/AdditionalClassLoaderResourcesBuildStep.java new file mode 100644 index 0000000000000..9f07f98382cc5 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/AdditionalClassLoaderResourcesBuildStep.java @@ -0,0 +1,45 @@ +package io.quarkus.deployment.steps; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.AdditionalClassLoaderResourcesBuildItem; +import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; + +public class AdditionalClassLoaderResourcesBuildStep { + + @BuildStep + void appendAdditionalClassloaderResources(BuildProducer producer, + List additionalResources) { + + if (!additionalResources.isEmpty()) { + QuarkusClassLoader cl = (QuarkusClassLoader) Thread.currentThread().getContextClassLoader(); + + Map collected = new LinkedHashMap(); + List additionalClassesToIndex = new ArrayList(); + for (AdditionalClassLoaderResourcesBuildItem item : additionalResources) { + + for (Entry entry : item.getResources().entrySet()) { + additionalClassesToIndex.add(entry.getKey()); + + collected.put(entry.getKey(), entry.getValue()); + // add it also as resources to allow index to work properly + collected.put(entry.getKey().replace('.', '/') + ".class", entry.getValue()); + + } + } + + cl.reset(collected, 
Collections.emptyMap()); + // produce the AdditionalIndexedClassesBuildItem so this build step + // is actually invoked and allows all the classes to be indexed directly + producer.produce(new AdditionalIndexedClassesBuildItem(additionalClassesToIndex.stream().toArray(String[]::new))); + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ChangedClassesBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ChangedClassesBuildStep.java new file mode 100644 index 0000000000000..a7c955e8ccb42 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ChangedClassesBuildStep.java @@ -0,0 +1,70 @@ +package io.quarkus.deployment.steps; + +import java.util.HashMap; +import java.util.Map; + +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; +import org.jboss.jandex.IndexView; + +import io.quarkus.deployment.IsDevelopment; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.ChangedClassesBuildItem; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; +import io.quarkus.deployment.builditem.LiveReloadBuildItem; + +public class ChangedClassesBuildStep { + + private static volatile IndexView oldIndex; + + @BuildStep(onlyIf = IsDevelopment.class) + ChangedClassesBuildItem changedClassesBuildItem(CombinedIndexBuildItem combinedIndexBuildItem, + LiveReloadBuildItem liveReloadBuildItem) { + IndexView currentIndex = combinedIndexBuildItem.getIndex(); + if (liveReloadBuildItem.getChangeInformation() == null) { + oldIndex = currentIndex; + return null; + } + Map changedClassesNewVersion = new HashMap<>(); + Map changedClassesOldVersion = new HashMap<>(); + Map deletedClasses = new HashMap<>(); + Map addedClasses = new HashMap<>(); + for (String added : liveReloadBuildItem.getChangeInformation().getAddedClasses()) { + DotName name = DotName.createSimple(added); + ClassInfo clazz = currentIndex.getClassByName(name); + if (clazz == null) { + //should never happen, but we bail out to be paranoid + return null; + } + addedClasses.put(name, clazz); + } + for (String deleted : liveReloadBuildItem.getChangeInformation().getDeletedClasses()) { + DotName name = DotName.createSimple(deleted); + ClassInfo clazz = oldIndex.getClassByName(name); + if (clazz == null) { + //should never happen, but we bail out to be paranoid + return null; + } + deletedClasses.put(name, clazz); + } + for (String mod : liveReloadBuildItem.getChangeInformation().getChangedClasses()) { + DotName name = DotName.createSimple(mod); + ClassInfo clazz = oldIndex.getClassByName(name); + if (clazz == null) { + //should never happen, but we bail out to be paranoid + return null; + } + changedClassesOldVersion.put(name, clazz); + clazz = currentIndex.getClassByName(name); + if (clazz == null) { + //should never happen, but we bail out to be paranoid + return null; + } + changedClassesNewVersion.put(name, clazz); + } + + oldIndex = currentIndex; + return new ChangedClassesBuildItem(changedClassesNewVersion, changedClassesOldVersion, deletedClasses, addedClasses); + } + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java index cdbeb0c057285..f924e1e2fc357 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java @@ -1,5 +1,7 @@ package 
io.quarkus.deployment.steps; +import java.io.File; +import java.io.FileOutputStream; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; @@ -7,6 +9,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; @@ -16,44 +19,73 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.function.BiFunction; +import java.util.stream.Collectors; import org.jboss.logging.Logger; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; +import io.quarkus.bootstrap.BootstrapDebug; import io.quarkus.bootstrap.classloading.ClassPathElement; import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.deployment.QuarkusClassWriter; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.ApplicationArchivesBuildItem; import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.builditem.LiveReloadBuildItem; import io.quarkus.deployment.builditem.TransformedClassesBuildItem; import io.quarkus.deployment.index.ConstPoolScanner; +import io.quarkus.runtime.LaunchMode; public class ClassTransformingBuildStep { private static final Logger log = Logger.getLogger(ClassTransformingBuildStep.class); + /** + * Cache used for dev mode to save the result for classes that have not changed. + */ + private static final Map transformedClassesCache = new ConcurrentHashMap<>(); + private static volatile BiFunction lastTransformers; + + public static byte[] transform(String className, byte[] classData) { + if (lastTransformers == null) { + return classData; + } + + return lastTransformers.apply(className, classData); + } + @BuildStep TransformedClassesBuildItem handleClassTransformation(List bytecodeTransformerBuildItems, - ApplicationArchivesBuildItem appArchives) throws ExecutionException, InterruptedException { + ApplicationArchivesBuildItem appArchives, LiveReloadBuildItem liveReloadBuildItem, + LaunchModeBuildItem launchModeBuildItem) + throws ExecutionException, InterruptedException { if (bytecodeTransformerBuildItems.isEmpty()) { return new TransformedClassesBuildItem(Collections.emptyMap()); } - final Map>> bytecodeTransformers = new HashMap<>( + final Map> bytecodeTransformers = new HashMap<>( bytecodeTransformerBuildItems.size()); Set noConstScanning = new HashSet<>(); Map> constScanning = new HashMap<>(); + Set eager = new HashSet<>(); + Set nonCacheable = new HashSet<>(); for (BytecodeTransformerBuildItem i : bytecodeTransformerBuildItems) { bytecodeTransformers.computeIfAbsent(i.getClassToTransform(), (h) -> new ArrayList<>()) - .add(i.getVisitorFunction()); + .add(i); if (i.getRequireConstPoolEntry() == null || i.getRequireConstPoolEntry().isEmpty()) { noConstScanning.add(i.getClassToTransform()); } else { constScanning.computeIfAbsent(i.getClassToTransform(), (s) -> new HashSet<>()) .addAll(i.getRequireConstPoolEntry()); } + if (i.isEager()) { + eager.add(i.getClassToTransform()); + } + if (!i.isCacheable()) { + nonCacheable.add(i.getClassToTransform()); + } } QuarkusClassLoader cl = (QuarkusClassLoader) Thread.currentThread().getContextClassLoader(); Map transformedToArchive = new ConcurrentHashMap<>(); @@ -62,22 +94,81 @@ TransformedClassesBuildItem handleClassTransformation(List> 
transformed = new ConcurrentLinkedDeque<>(); + final Map> transformedClassesByJar = new HashMap<>(); + ClassLoader transformCl = Thread.currentThread().getContextClassLoader(); + lastTransformers = new BiFunction() { + @Override + public byte[] apply(String className, byte[] originalBytes) { + + List classTransformers = bytecodeTransformers.get(className); + if (classTransformers == null) { + return originalBytes; + } + List> visitors = classTransformers.stream() + .map(BytecodeTransformerBuildItem::getVisitorFunction).filter(Objects::nonNull) + .collect(Collectors.toList()); + List> preVisitFunctions = classTransformers.stream() + .map(BytecodeTransformerBuildItem::getInputTransformer).filter(Objects::nonNull) + .collect(Collectors.toList()); + ClassLoader old = Thread.currentThread().getContextClassLoader(); + try { + Thread.currentThread().setContextClassLoader(transformCl); + String classFileName = className.replace(".", "/") + ".class"; + List archives = cl.getElementsWithResource(classFileName); + if (!archives.isEmpty()) { + ClassPathElement classPathElement = archives.get(0); + Path jar = classPathElement.getRoot(); + byte[] classData = classPathElement.getResource(classFileName).getData(); + Set constValues = constScanning.get(className); + if (constValues != null && !noConstScanning.contains(className)) { + if (!ConstPoolScanner.constPoolEntryPresent(classData, constValues)) { + return originalBytes; + } + } + byte[] data = transformClass(className, visitors, classData, preVisitFunctions); + TransformedClassesBuildItem.TransformedClass transformedClass = new TransformedClassesBuildItem.TransformedClass( + className, data, + classFileName, eager.contains(className)); + return transformedClass.getData(); + } else { + return originalBytes; + } + } finally { + Thread.currentThread().setContextClassLoader(old); + } + } + }; try { - ClassLoader transformCl = Thread.currentThread().getContextClassLoader(); - for (Map.Entry>> entry : bytecodeTransformers + for (Map.Entry> entry : bytecodeTransformers .entrySet()) { String className = entry.getKey(); + boolean cacheable = !nonCacheable.contains(className); + if (cacheable && transformedClassesCache.containsKey(className)) { + if (liveReloadBuildItem.getChangeInformation() != null) { + if (!liveReloadBuildItem.getChangeInformation().getChangedClasses().contains(className)) { + //we can use the cached transformation + handleTransformedClass(transformedToArchive, transformedClassesByJar, + transformedClassesCache.get(className)); + continue; + } + } + } String classFileName = className.replace(".", "/") + ".class"; List archives = cl.getElementsWithResource(classFileName); if (!archives.isEmpty()) { ClassPathElement classPathElement = archives.get(0); Path jar = classPathElement.getRoot(); if (jar == null) { - log.warnf("Cannot transform %s as it's containing application archive could not be found.", + log.warnf("Cannot transform %s as its containing application archive could not be found.", entry.getKey()); continue; } - List> visitors = entry.getValue(); + List> visitors = entry.getValue().stream() + .map(BytecodeTransformerBuildItem::getVisitorFunction).filter(Objects::nonNull) + .collect(Collectors.toList()); + List> preVisitFunctions = entry.getValue().stream() + .map(BytecodeTransformerBuildItem::getInputTransformer).filter(Objects::nonNull) + .collect(Collectors.toList()); transformedToArchive.put(classFileName, jar); transformed.add(executorPool.submit(new Callable() { @Override @@ -85,30 +176,28 @@ public 
TransformedClassesBuildItem.TransformedClass call() throws Exception { ClassLoader old = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(transformCl); - byte[] classData = classPathElement.getResource(classFileName).getData(); Set constValues = constScanning.get(className); + byte[] classData = classPathElement.getResource(classFileName).getData(); if (constValues != null && !noConstScanning.contains(className)) { if (!ConstPoolScanner.constPoolEntryPresent(classData, constValues)) { return null; } } - ClassReader cr = new ClassReader(classData); - ClassWriter writer = new QuarkusClassWriter(cr, - ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS); - ClassVisitor visitor = writer; - for (BiFunction i : visitors) { - visitor = i.apply(className, visitor); + byte[] data = transformClass(className, visitors, classData, preVisitFunctions); + TransformedClassesBuildItem.TransformedClass transformedClass = new TransformedClassesBuildItem.TransformedClass( + className, data, + classFileName, eager.contains(className)); + if (cacheable && launchModeBuildItem.getLaunchMode() == LaunchMode.DEVELOPMENT) { + transformedClassesCache.put(className, transformedClass); } - cr.accept(visitor, 0); - return new TransformedClassesBuildItem.TransformedClass(writer.toByteArray(), - classFileName); + return transformedClass; } finally { Thread.currentThread().setContextClassLoader(old); } } })); } else { - log.warnf("Cannot transform %s as it's containing application archive could not be found.", + log.warnf("Cannot transform %s as its containing application archive could not be found.", entry.getKey()); } } @@ -116,17 +205,58 @@ public TransformedClassesBuildItem.TransformedClass call() throws Exception { } finally { executorPool.shutdown(); } - Map> transformedClassesByJar = new HashMap<>(); if (!transformed.isEmpty()) { for (Future i : transformed) { final TransformedClassesBuildItem.TransformedClass res = i.get(); if (res != null) { - transformedClassesByJar.computeIfAbsent(transformedToArchive.get(res.getFileName()), (a) -> new HashSet<>()) - .add(res); + handleTransformedClass(transformedToArchive, transformedClassesByJar, res); } } } return new TransformedClassesBuildItem(transformedClassesByJar); } + private byte[] transformClass(String className, List> visitors, + byte[] classData, List> preVisitFunctions) { + for (BiFunction i : preVisitFunctions) { + classData = i.apply(className, classData); + } + byte[] data; + if (!visitors.isEmpty()) { + ClassReader cr = new ClassReader(classData); + ClassWriter writer = new QuarkusClassWriter(cr, + ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS); + ClassVisitor visitor = writer; + for (BiFunction i : visitors) { + visitor = i.apply(className, visitor); + } + cr.accept(visitor, 0); + data = writer.toByteArray(); + } else { + data = classData; + } + if (BootstrapDebug.DEBUG_TRANSFORMED_CLASSES_DIR != null) { + File debugPath = new File(BootstrapDebug.DEBUG_TRANSFORMED_CLASSES_DIR); + if (!debugPath.exists()) { + debugPath.mkdir(); + } + File classFile = new File(debugPath, className.replace(".", "/") + ".class"); + classFile.getParentFile().mkdirs(); + try (FileOutputStream classWriter = new FileOutputStream(classFile)) { + classWriter.write(data); + } catch (Exception e) { + log.errorf(e, "Failed to write transformed class %s", className); + } + log.infof("Wrote transformed class to %s", classFile.getAbsolutePath()); + } + return data; + } + + private void handleTransformedClass(Map 
transformedToArchive, + Map> transformedClassesByJar, + TransformedClassesBuildItem.TransformedClass res) { + transformedClassesByJar.computeIfAbsent(transformedToArchive.get(res.getFileName()), (a) -> new HashSet<>()) + .add(res); + } + } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/CombinedIndexBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/CombinedIndexBuildStep.java index 2c6730c3257d7..922b36d759ff4 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/CombinedIndexBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/CombinedIndexBuildStep.java @@ -15,13 +15,18 @@ import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; import io.quarkus.deployment.builditem.ApplicationArchivesBuildItem; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; +import io.quarkus.deployment.builditem.LiveReloadBuildItem; +import io.quarkus.deployment.dev.RuntimeUpdatesProcessor; +import io.quarkus.deployment.index.IndexWrapper; import io.quarkus.deployment.index.IndexingUtil; +import io.quarkus.deployment.index.PersistentClassIndex; public class CombinedIndexBuildStep { @BuildStep CombinedIndexBuildItem build(ApplicationArchivesBuildItem archives, - List additionalIndexedClassesItems) { + List additionalIndexedClassesItems, + LiveReloadBuildItem liveReloadBuildItem) { List archiveIndexes = new ArrayList<>(); for (ApplicationArchive i : archives.getAllApplicationArchives()) { @@ -40,7 +45,16 @@ CombinedIndexBuildItem build(ApplicationArchivesBuildItem archives, } } - return new CombinedIndexBuildItem(CompositeIndex.create(archivesIndex, indexer.complete())); + PersistentClassIndex index = liveReloadBuildItem.getContextObject(PersistentClassIndex.class); + if (index == null) { + index = new PersistentClassIndex(); + liveReloadBuildItem.setContextObject(PersistentClassIndex.class, index); + } + + CompositeIndex compositeIndex = CompositeIndex.create(archivesIndex, indexer.complete()); + RuntimeUpdatesProcessor.setLastStartIndex(compositeIndex); + return new CombinedIndexBuildItem(compositeIndex, + new IndexWrapper(compositeIndex, Thread.currentThread().getContextClassLoader(), index)); } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigBuildSteps.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigBuildSteps.java index 84747381902a9..0699b8d2d6d95 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigBuildSteps.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigBuildSteps.java @@ -27,6 +27,9 @@ import io.quarkus.gizmo.MethodDescriptor; import io.quarkus.gizmo.ResultHandle; import io.quarkus.runtime.graal.InetRunTime; +import io.smallrye.config.ConfigSourceFactory; +import io.smallrye.config.ConfigSourceInterceptor; +import io.smallrye.config.ConfigSourceInterceptorFactory; import io.smallrye.config.SmallRyeConfigProviderResolver; class ConfigBuildSteps { @@ -82,7 +85,10 @@ void nativeServiceProviders( for (Class serviceClass : Arrays.asList( ConfigSource.class, ConfigSourceProvider.class, - Converter.class)) { + Converter.class, + ConfigSourceInterceptor.class, + ConfigSourceInterceptorFactory.class, + ConfigSourceFactory.class)) { final String serviceName = serviceClass.getName(); final Set names = ServiceUtil.classNamesNamedIn(classLoader, SERVICES_PREFIX + serviceName); final List list = names.stream() diff --git 
a/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java index 4441a6def5f56..4a17e455e53e5 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java @@ -29,9 +29,10 @@ import io.quarkus.deployment.configuration.definition.RootDefinition; import io.quarkus.deployment.logging.LoggingSetupBuildItem; import io.quarkus.gizmo.ClassOutput; -import io.quarkus.runtime.ConfigChangeRecorder; import io.quarkus.runtime.LaunchMode; import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.configuration.ConfigChangeRecorder; +import io.quarkus.runtime.configuration.ConfigurationRuntimeConfig; public class ConfigGenerationBuildStep { @@ -82,7 +83,8 @@ private List getAdditionalBootstrapConfigSourceProviders( @BuildStep @Record(ExecutionTime.RUNTIME_INIT) public void checkForBuildTimeConfigChange( - ConfigChangeRecorder recorder, ConfigurationBuildItem configItem, LoggingSetupBuildItem loggingSetupBuildItem) { + ConfigChangeRecorder recorder, ConfigurationBuildItem configItem, LoggingSetupBuildItem loggingSetupBuildItem, + ConfigurationRuntimeConfig configurationConfig) { BuildTimeConfigurationReader.ReadResult readResult = configItem.getReadResult(); Config config = ConfigProvider.getConfig(); @@ -96,7 +98,7 @@ public void checkForBuildTimeConfigChange( } } values.remove("quarkus.profile"); - recorder.handleConfigChange(values); + recorder.handleConfigChange(configurationConfig, values); } private void handleMembers(Config config, Map values, Iterable members, diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/MainClassBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/MainClassBuildStep.java index 9d639e89f81a7..433b625a7071e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/MainClassBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/MainClassBuildStep.java @@ -25,9 +25,9 @@ import org.jboss.jandex.MethodInfo; import org.jboss.jandex.Type; import org.jboss.logging.Logger; -import org.jboss.logmanager.handlers.DelayedHandler; import io.quarkus.bootstrap.logging.InitialConfigurator; +import io.quarkus.bootstrap.logging.QuarkusDelayedHandler; import io.quarkus.bootstrap.runner.Timing; import io.quarkus.builder.Version; import io.quarkus.deployment.GeneratedClassGizmoAdaptor; @@ -94,7 +94,6 @@ public class MainClassBuildStep { JAVAX_NET_SSL_TRUST_STORE_PASSWORD)); public static final String GENERATE_APP_CDS_SYSTEM_PROPERTY = "quarkus.appcds.generate"; - public static final String PRINT_STARTUP_TIMES_PROPERTY = "quarkus.debug.print-startup-times"; private static final FieldDescriptor STARTUP_CONTEXT_FIELD = FieldDescriptor.of(Application.APP_CLASS_NAME, STARTUP_CONTEXT, StartupContext.class); @@ -286,19 +285,17 @@ void build(List staticInitTasks, activeProfile, tryBlock.load(LaunchMode.DEVELOPMENT.equals(launchMode.getLaunchMode()))); cb = tryBlock.addCatch(Throwable.class); - cb.invokeVirtualMethod(ofMethod(Logger.class, "errorv", void.class, Throwable.class, String.class, Object.class), - cb.readStaticField(logField.getFieldDescriptor()), cb.getCaughtException(), - cb.load("Failed to start application (with profile {0})"), activeProfile); // an exception was thrown before logging was actually setup, we simply dump everything to the 
console ResultHandle delayedHandler = cb - .readStaticField(FieldDescriptor.of(InitialConfigurator.class, "DELAYED_HANDLER", DelayedHandler.class)); - ResultHandle isActivated = cb.invokeVirtualMethod(ofMethod(DelayedHandler.class, "isActivated", boolean.class), + .readStaticField(FieldDescriptor.of(InitialConfigurator.class, "DELAYED_HANDLER", QuarkusDelayedHandler.class)); + ResultHandle isActivated = cb.invokeVirtualMethod(ofMethod(QuarkusDelayedHandler.class, "isActivated", boolean.class), delayedHandler); BytecodeCreator isActivatedFalse = cb.ifNonZero(isActivated).falseBranch(); ResultHandle handlersArray = isActivatedFalse.newArray(Handler.class, 1); isActivatedFalse.writeArrayValue(handlersArray, 0, isActivatedFalse.newInstance(ofConstructor(ConsoleHandler.class))); - isActivatedFalse.invokeVirtualMethod(ofMethod(DelayedHandler.class, "setHandlers", Handler[].class, Handler[].class), + isActivatedFalse.invokeVirtualMethod( + ofMethod(QuarkusDelayedHandler.class, "setHandlers", Handler[].class, Handler[].class), delayedHandler, handlersArray); isActivatedFalse.breakScope(); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageAutoFeatureStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageAutoFeatureStep.java index c3a0aedb5922b..3cb12a9ffb3f4 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageAutoFeatureStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageAutoFeatureStep.java @@ -2,26 +2,17 @@ import static io.quarkus.gizmo.MethodDescriptor.ofMethod; -import java.io.File; -import java.io.IOException; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Constructor; import java.lang.reflect.Executable; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Enumeration; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.jar.JarEntry; -import java.util.jar.JarFile; import org.graalvm.nativeimage.ImageSingletons; import org.graalvm.nativeimage.hosted.Feature; @@ -35,11 +26,12 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageProxyDefinitionBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBundleBuildItem; -import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceDirectoryBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourcePatternsBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveFieldBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveMethodBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedPackageBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; import io.quarkus.deployment.builditem.nativeimage.UnsafeAccessedFieldBuildItem; @@ -57,49 +49,33 @@ public class NativeImageAutoFeatureStep { private static final String GRAAL_AUTOFEATURE = "io/quarkus/runner/AutoFeature"; 
private static final MethodDescriptor IMAGE_SINGLETONS_LOOKUP = ofMethod(ImageSingletons.class, "lookup", Object.class, Class.class); - private static final MethodDescriptor INITIALIZE_AT_RUN_TIME = ofMethod(RuntimeClassInitialization.class, + private static final MethodDescriptor INITIALIZE_CLASSES_AT_RUN_TIME = ofMethod(RuntimeClassInitialization.class, "initializeAtRunTime", void.class, Class[].class); + private static final MethodDescriptor INITIALIZE_PACKAGES_AT_RUN_TIME = ofMethod(RuntimeClassInitialization.class, + "initializeAtRunTime", void.class, String[].class); private static final MethodDescriptor RERUN_INITIALIZATION = ofMethod( "org.graalvm.nativeimage.impl.RuntimeClassInitializationSupport", "rerunInitialization", void.class, Class.class, String.class); + private static final MethodDescriptor RESOURCES_REGISTRY_ADD_RESOURCES = ofMethod( + "com.oracle.svm.core.configure.ResourcesRegistry", + "addResources", void.class, String.class); + private static final MethodDescriptor RESOURCES_REGISTRY_IGNORE_RESOURCES = ofMethod( + "com.oracle.svm.core.configure.ResourcesRegistry", + "ignoreResources", void.class, String.class); static final String RUNTIME_REFLECTION = RuntimeReflection.class.getName(); static final String JNI_RUNTIME_ACCESS = "com.oracle.svm.core.jni.JNIRuntimeAccess"; static final String BEFORE_ANALYSIS_ACCESS = Feature.BeforeAnalysisAccess.class.getName(); static final String DYNAMIC_PROXY_REGISTRY = "com.oracle.svm.core.jdk.proxy.DynamicProxyRegistry"; static final String LOCALIZATION_FEATURE = "com.oracle.svm.core.jdk.LocalizationFeature"; - @BuildStep - List registerPackageResources( - List nativeImageResourceDirectories) - throws IOException, URISyntaxException { - List resources = new ArrayList<>(); - - for (NativeImageResourceDirectoryBuildItem nativeImageResourceDirectory : nativeImageResourceDirectories) { - String path = Thread.currentThread().getContextClassLoader().getResource(nativeImageResourceDirectory.getPath()) - .getPath(); - File resourceFile = Paths.get(new URL(path.substring(0, path.indexOf("!"))).toURI()).toFile(); - try (JarFile jarFile = new JarFile(resourceFile)) { - Enumeration entries = jarFile.entries(); - while (entries.hasMoreElements()) { - JarEntry entry = entries.nextElement(); - String resourceName = entry.getName(); - if (!entry.isDirectory() && resourceName.startsWith(nativeImageResourceDirectory.getPath()) - && !resourceName.endsWith(".class")) { - resources.add(new NativeImageResourceBuildItem(resourceName)); - } - } - } - } - - return resources; - } - @BuildStep void generateFeature(BuildProducer nativeImageClass, List runtimeInitializedClassBuildItems, + List runtimeInitializedPackageBuildItems, List runtimeReinitializedClassBuildItems, List proxies, List resources, + List resourcePatterns, List resourceBundles, List reflectiveMethods, List reflectiveFields, @@ -152,7 +128,20 @@ public void write(String s, byte[] bytes) { CatchBlockCreator cc = tc.addCatch(Throwable.class); cc.invokeVirtualMethod(ofMethod(Throwable.class, "printStackTrace", void.class), cc.getCaughtException()); } - overallCatch.invokeStaticMethod(INITIALIZE_AT_RUN_TIME, classes); + overallCatch.invokeStaticMethod(INITIALIZE_CLASSES_AT_RUN_TIME, classes); + } + + if (!runtimeInitializedPackageBuildItems.isEmpty()) { + ResultHandle packages = overallCatch.newArray(String.class, + overallCatch.load(runtimeInitializedPackageBuildItems.size())); + for (int i = 0; i < runtimeInitializedPackageBuildItems.size(); i++) { + TryBlock tc = overallCatch.tryBlock(); + 
ResultHandle pkg = tc.load(runtimeInitializedPackageBuildItems.get(i).getPackageName()); + tc.writeArrayValue(packages, i, pkg); + CatchBlockCreator cc = tc.addCatch(Throwable.class); + cc.invokeVirtualMethod(ofMethod(Throwable.class, "printStackTrace", void.class), cc.getCaughtException()); + } + overallCatch.invokeStaticMethod(INITIALIZE_PACKAGES_AT_RUN_TIME, packages); } // hack in reinitialization of process info classes @@ -201,6 +190,27 @@ public void write(String s, byte[] bytes) { } } + /* Resource includes and excludes */ + if (!resourcePatterns.isEmpty()) { + ResultHandle resourcesRegistrySingleton = overallCatch.invokeStaticMethod(IMAGE_SINGLETONS_LOOKUP, + overallCatch.loadClass("com.oracle.svm.core.configure.ResourcesRegistry")); + TryBlock tc = overallCatch.tryBlock(); + for (NativeImageResourcePatternsBuildItem resourcePatternsItem : resourcePatterns) { + for (String pattern : resourcePatternsItem.getExcludePatterns()) { + tc.invokeInterfaceMethod(RESOURCES_REGISTRY_IGNORE_RESOURCES, resourcesRegistrySingleton, + overallCatch.load(pattern)); + } + for (String pattern : resourcePatternsItem.getIncludePatterns()) { + tc.invokeInterfaceMethod( + RESOURCES_REGISTRY_ADD_RESOURCES, + resourcesRegistrySingleton, + tc.load(pattern)); + } + } + CatchBlockCreator cc = tc.addCatch(Throwable.class); + cc.invokeVirtualMethod(ofMethod(Throwable.class, "printStackTrace", void.class), cc.getCaughtException()); + } + for (ServiceProviderBuildItem i : serviceProviderBuildItems) { overallCatch.invokeStaticMethod(ofMethod(ResourceHelper.class, "registerResources", void.class, String.class), overallCatch.load(i.serviceDescriptorFile())); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageConfigBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageConfigBuildStep.java index 74bd913c1eb8c..f7ab42243b0d3 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageConfigBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageConfigBuildStep.java @@ -15,7 +15,6 @@ import io.quarkus.deployment.builditem.JavaLibraryPathAdditionalPathBuildItem; import io.quarkus.deployment.builditem.JniBuildItem; import io.quarkus.deployment.builditem.NativeImageEnableAllCharsetsBuildItem; -import io.quarkus.deployment.builditem.NativeImageEnableAllTimeZonesBuildItem; import io.quarkus.deployment.builditem.SslNativeConfigBuildItem; import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageConfigBuildItem; @@ -24,6 +23,7 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageSystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; +import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; import io.quarkus.runtime.ssl.SslContextConfigurationRecorder; //TODO: this should go away, once we decide on which one of the API's we want @@ -38,7 +38,6 @@ void build(SslContextConfigurationRecorder sslContextConfigurationRecorder, SslNativeConfigBuildItem sslNativeConfig, List jniBuildItems, List nativeImageEnableAllCharsetsBuildItems, - List nativeImageEnableAllTimeZonesBuildItems, List extensionSslNativeSupport, List enableAllSecurityServicesBuildItems, BuildProducer proxy, @@ -88,10 +87,15 @@ void build(SslContextConfigurationRecorder sslContextConfigurationRecorder, if 
(!nativeImageEnableAllCharsetsBuildItems.isEmpty()) { nativeImage.produce(new NativeImageSystemPropertyBuildItem("quarkus.native.enable-all-charsets", "true")); } + } - if (!nativeImageEnableAllTimeZonesBuildItems.isEmpty()) { - nativeImage.produce(new NativeImageSystemPropertyBuildItem("quarkus.native.enable-all-timezones", "true")); - } + @BuildStep(onlyIf = NativeOrNativeSourcesBuild.class) + void reinitHostNameUtil(BuildProducer runtimeReInitClass) { + // certain libraries like JBoss logging internally use this class to determine the hostname + // of the system. This HostName class computes and stores the hostname as a static field in a class, + // so we reinitialize this to re-compute the field (and other related fields) during native application's + // runtime + runtimeReInitClass.produce(new RuntimeReinitializedClassBuildItem("org.wildfly.common.net.HostName")); } private Boolean isSslNativeEnabled(SslNativeConfigBuildItem sslNativeConfig, diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageResourcesStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageResourcesStep.java new file mode 100644 index 0000000000000..c04a653016728 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageResourcesStep.java @@ -0,0 +1,65 @@ +package io.quarkus.deployment.steps; + +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; +import java.util.Optional; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceDirectoryBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourcePatternsBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourcePatternsBuildItem.Builder; +import io.quarkus.deployment.pkg.NativeConfig; + +public class NativeImageResourcesStep { + + @BuildStep + List registerPackageResources( + List nativeImageResourceDirectories) + throws IOException, URISyntaxException { + List resources = new ArrayList<>(); + + for (NativeImageResourceDirectoryBuildItem nativeImageResourceDirectory : nativeImageResourceDirectories) { + String path = Thread.currentThread().getContextClassLoader().getResource(nativeImageResourceDirectory.getPath()) + .getPath(); + File resourceFile = Paths.get(new URL(path.substring(0, path.indexOf("!"))).toURI()).toFile(); + try (JarFile jarFile = new JarFile(resourceFile)) { + Enumeration entries = jarFile.entries(); + while (entries.hasMoreElements()) { + JarEntry entry = entries.nextElement(); + String resourceName = entry.getName(); + if (!entry.isDirectory() && resourceName.startsWith(nativeImageResourceDirectory.getPath()) + && !resourceName.endsWith(".class")) { + resources.add(new NativeImageResourceBuildItem(resourceName)); + } + } + } + } + + return resources; + } + + @BuildStep + void forwardResourcePatternConfigToBuildItem( + NativeConfig nativeConfig, + BuildProducer nativeImageResourcePatterns) { + + final Optional> includes = nativeConfig.resources.includes; + final Optional> excludes = nativeConfig.resources.excludes; + if (includes.isPresent() || excludes.isPresent()) { + final Builder 
builder = NativeImageResourcePatternsBuildItem.builder(); + includes.ifPresent(builder::includeGlobs); + excludes.ifPresent(builder::excludeGlobs); + nativeImageResourcePatterns.produce(builder.build()); + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ReflectiveHierarchyStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ReflectiveHierarchyStep.java index 720f4c788b530..0ddd07023e62f 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ReflectiveHierarchyStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ReflectiveHierarchyStep.java @@ -31,6 +31,7 @@ import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassFinalFieldsWritablePredicateBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyIgnoreWarningBuildItem; +import io.quarkus.deployment.util.JandexUtil; public class ReflectiveHierarchyStep { @@ -89,7 +90,7 @@ public void build(CombinedIndexBuildItem combinedIndexBuildItem, log.warnf( "Unable to properly register the hierarchy of the following classes for reflection as they are not in the Jandex index:%n%s" + "%nConsider adding them to the index either by creating a Jandex index " - + "for your dependency via the Maven plugin, an empty META-INF/beans.xml or quarkus.index-dependency properties.\");.", + + "for your dependency via the Maven plugin, an empty META-INF/beans.xml or quarkus.index-dependency properties.", unindexedClassesWarn.toString()); } } @@ -119,21 +120,23 @@ private void addReflectiveHierarchy(CombinedIndexBuildItem combinedIndexBuildIte type instanceof UnresolvedTypeVariable) { return; } else if (type instanceof ClassType) { - if (reflectiveHierarchyBuildItem.getIgnorePredicate().test(type.name())) { + if (reflectiveHierarchyBuildItem.getIgnoreTypePredicate().test(type.name())) { return; } - addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, type.name(), + addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, type.name(), type.name(), processedReflectiveHierarchies, unindexedClasses, finalFieldsWritable, reflectiveClass); for (ClassInfo subclass : combinedIndexBuildItem.getIndex().getAllKnownSubclasses(type.name())) { addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, subclass.name(), + subclass.name(), processedReflectiveHierarchies, unindexedClasses, finalFieldsWritable, reflectiveClass); } for (ClassInfo subclass : combinedIndexBuildItem.getIndex().getAllKnownImplementors(type.name())) { addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, subclass.name(), + subclass.name(), processedReflectiveHierarchies, unindexedClasses, finalFieldsWritable, reflectiveClass); } @@ -144,8 +147,9 @@ private void addReflectiveHierarchy(CombinedIndexBuildItem combinedIndexBuildIte unindexedClasses, finalFieldsWritable, reflectiveClass); } else if (type instanceof ParameterizedType) { ParameterizedType parameterizedType = (ParameterizedType) type; - if (!reflectiveHierarchyBuildItem.getIgnorePredicate().test(parameterizedType.name())) { + if (!reflectiveHierarchyBuildItem.getIgnoreTypePredicate().test(parameterizedType.name())) { addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, parameterizedType.name(), + parameterizedType.name(), processedReflectiveHierarchies, unindexedClasses, finalFieldsWritable, reflectiveClass); } 
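The new NativeImageResourcesStep above wires the native resource include/exclude configuration (presumably surfaced as quarkus.native.resources.includes and quarkus.native.resources.excludes in application.properties) into a NativeImageResourcePatternsBuildItem, which generateFeature then feeds to GraalVM's ResourcesRegistry. As a rough sketch of how an extension could produce the same build item programmatically, using only the builder calls visible in this diff (the processor class, step name and glob values below are illustrative, not part of the patch):

```java
import java.util.Arrays;

import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.nativeimage.NativeImageResourcePatternsBuildItem;

public class MyExtensionProcessor {

    // Hypothetical extension build step: produces the same build item that
    // forwardResourcePatternConfigToBuildItem derives from NativeConfig.
    @BuildStep
    NativeImageResourcePatternsBuildItem nativeResourcePatterns() {
        // includeGlobs/excludeGlobs are the builder methods used in the step above;
        // the List-based overload is the one the config-forwarding code relies on.
        return NativeImageResourcePatternsBuildItem.builder()
                .includeGlobs(Arrays.asList("templates/**", "i18n/*.properties"))
                .excludeGlobs(Arrays.asList("templates/**/*.bak"))
                .build();
    }
}
```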
@@ -161,11 +165,15 @@ private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem ReflectiveHierarchyBuildItem reflectiveHierarchyBuildItem, String source, DotName name, + DotName initialName, Set processedReflectiveHierarchies, Map> unindexedClasses, Predicate finalFieldsWritable, BuildProducer reflectiveClass) { - if (reflectiveHierarchyBuildItem.getIgnorePredicate().test(name)) { + if (name == null) { + return; + } + if (reflectiveHierarchyBuildItem.getIgnoreTypePredicate().test(name)) { return; } @@ -195,20 +203,43 @@ private void addClassTypeHierarchy(CombinedIndexBuildItem combinedIndexBuildItem return; } - addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, info.superName(), + addClassTypeHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, info.superName(), initialName, processedReflectiveHierarchies, unindexedClasses, finalFieldsWritable, reflectiveClass); for (FieldInfo field : info.fields()) { + if (reflectiveHierarchyBuildItem.getIgnoreFieldPredicate().test(field)) { + continue; + } if (Modifier.isStatic(field.flags()) || field.name().startsWith("this$") || field.name().startsWith("val$")) { // skip the static fields (especially loggers) // also skip the outer class elements (unfortunately, we don't have a way to test for synthetic fields in Jandex) continue; } - addReflectiveHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, field.type(), + Type fieldType = field.type(); + if ((field.type().kind() == Kind.TYPE_VARIABLE) && (info.typeParameters().size() == 1)) { + // handle the common case where the super type has a generic type in the class signature which + // is completely resolved by the sub type + // this could be made to handle more complex cases, but it is unlikely we will have to do so + if (field.type().asTypeVariable().identifier().equals(info.typeParameters().get(0).identifier())) { + try { + List types = JandexUtil.resolveTypeParameters(initialName, info.name(), + combinedIndexBuildItem.getIndex()); + if (types.size() == 1) { + fieldType = types.get(0); + } + } catch (IllegalArgumentException ignored) { + + } + } + } + addReflectiveHierarchy(combinedIndexBuildItem, reflectiveHierarchyBuildItem, source, fieldType, processedReflectiveHierarchies, unindexedClasses, finalFieldsWritable, reflectiveClass); } for (MethodInfo method : info.methods()) { + if (reflectiveHierarchyBuildItem.getIgnoreMethodPredicate().test(method)) { + continue; + } if (method.parameters().size() > 0 || Modifier.isStatic(method.flags()) || method.returnType().kind() == Kind.VOID) { // we will only consider potential getters diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java index d22ce75cabae0..891e6f4d1b5de 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java @@ -19,14 +19,11 @@ public class RegisterForReflectionBuildStep { private static final Logger log = Logger.getLogger(RegisterForReflectionBuildStep.class); - @Inject - BuildProducer reflectiveClass; - @Inject CombinedIndexBuildItem combinedIndexBuildItem; @BuildStep - public void build() { + public void build(BuildProducer reflectiveClass) { for (AnnotationInstance i : combinedIndexBuildItem.getIndex() 
.getAnnotations(DotName.createSimple(RegisterForReflection.class.getName()))) { @@ -40,21 +37,21 @@ public void build() { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (targetsValue == null && classNamesValue == null) { ClassInfo classInfo = i.target().asClass(); - registerClass(classLoader, classInfo.name().toString(), methods, fields, ignoreNested); + registerClass(classLoader, classInfo.name().toString(), methods, fields, ignoreNested, reflectiveClass); continue; } if (targetsValue != null) { Type[] targets = targetsValue.asClassArray(); for (Type type : targets) { - registerClass(classLoader, type.name().toString(), methods, fields, ignoreNested); + registerClass(classLoader, type.name().toString(), methods, fields, ignoreNested, reflectiveClass); } } if (classNamesValue != null) { String[] classNames = classNamesValue.asStringArray(); for (String className : classNames) { - registerClass(classLoader, className, methods, fields, ignoreNested); + registerClass(classLoader, className, methods, fields, ignoreNested, reflectiveClass); } } } @@ -64,7 +61,7 @@ public void build() { * BFS Recursive Method to register a class and it's inner classes for Reflection. */ private void registerClass(ClassLoader classLoader, String className, boolean methods, boolean fields, - boolean ignoreNested) { + boolean ignoreNested, final BuildProducer reflectiveClass) { reflectiveClass.produce(new ReflectiveClassBuildItem(methods, fields, className)); if (ignoreNested) { @@ -74,7 +71,7 @@ private void registerClass(ClassLoader classLoader, String className, boolean me try { Class[] declaredClasses = classLoader.loadClass(className).getDeclaredClasses(); for (Class clazz : declaredClasses) { - registerClass(classLoader, clazz.getName(), methods, fields, false); + registerClass(classLoader, clazz.getName(), methods, fields, false, reflectiveClass); } } catch (ClassNotFoundException e) { log.warnf(e, "Failed to load Class %s", className); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/test/TestResourceProvider.java b/core/deployment/src/main/java/io/quarkus/deployment/test/TestResourceProvider.java deleted file mode 100644 index 606f910e9251c..0000000000000 --- a/core/deployment/src/main/java/io/quarkus/deployment/test/TestResourceProvider.java +++ /dev/null @@ -1,7 +0,0 @@ -package io.quarkus.deployment.test; - -public interface TestResourceProvider { - - void inject(Object test); - -} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/ArtifactInfoUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/ArtifactInfoUtil.java new file mode 100644 index 0000000000000..4fc1c5974687f --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/util/ArtifactInfoUtil.java @@ -0,0 +1,110 @@ +package io.quarkus.deployment.util; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.AbstractMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Properties; + +import io.quarkus.bootstrap.model.AppDependency; +import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; + +public final class ArtifactInfoUtil { + + public static final String DEPLOYMENT = "-deployment"; + + /** + * Returns a Map.Entry containing the groupId and the artifactId of the module the contains the 
BuildItem
 + * <p>
+ * The way this works is by depending on the pom.properties file that should be present in the deployment jar + * + * @return the result, or throws + */ + public static Map.Entry groupIdAndArtifactId(Class clazz) { + return groupIdAndArtifactId(clazz, null); + } + + /** + * Returns a Map.Entry containing the groupId and the artifactId of the module the contains the BuildItem + *

+ * The way this works is by depending on the pom.properties file that should be present in the deployment jar + * + * @return the result, or throws + */ + public static Map.Entry groupIdAndArtifactId(Class clazz, + CurateOutcomeBuildItem curateOutcomeBuildItem) { + try { + URL jarLocation = clazz.getProtectionDomain().getCodeSource().getLocation(); + if (jarLocation.toString().endsWith(".jar")) { + try (FileSystem fs = FileSystems.newFileSystem(Paths.get(jarLocation.toURI()), + Thread.currentThread().getContextClassLoader())) { + Entry ret = groupIdAndArtifactId(fs); + if (ret == null) { + throw new RuntimeException("Unable to determine groupId and artifactId of the jar that contains " + + clazz.getName() + " because the jar doesn't contain the necessary metadata"); + } + return ret; + } + } else if (curateOutcomeBuildItem != null) { + // this is needed only for QuarkusDevModeTest inside Quarkus where the class is read from the corresponding directory + Path path = Paths.get(jarLocation.toURI()); + for (AppDependency i : curateOutcomeBuildItem.getEffectiveModel().getFullDeploymentDeps()) { + for (Path p : i.getArtifact().getPaths()) { + if (path.equals(p)) { + + String artifactId = i.getArtifact().getArtifactId(); + if (artifactId.endsWith(DEPLOYMENT)) { + artifactId = artifactId.substring(0, artifactId.length() - DEPLOYMENT.length()); + } + return new AbstractMap.SimpleEntry<>(i.getArtifact().getGroupId(), artifactId); + } + } + } + return new AbstractMap.SimpleEntry<>("unspecified", "unspecified"); + } else { + return new AbstractMap.SimpleEntry<>("unspecified", "unspecified"); + } + } catch (IOException | URISyntaxException e) { + throw new RuntimeException("Unable to determine groupId and artifactId of the jar that contains " + clazz.getName(), + e); + } + } + + /** + * Returns a Map.Entry containing the groupId and the artifactId of the module the contains the BuildItem + *

+ * The way this works is by depending on the pom.properties file that should be present in the deployment jar + * + * @return the result, or null if no maven metadata were found + */ + public static Map.Entry groupIdAndArtifactId(FileSystem fs) throws IOException { + Path metaInfPath = fs.getPath("/META-INF"); + return doGroupIdAndArtifactId(metaInfPath); + } + + private static AbstractMap.SimpleEntry doGroupIdAndArtifactId(Path metaInfPath) throws IOException { + Optional pomProperties = Files.walk(metaInfPath) + .filter(Files::isRegularFile) + .filter(p -> p.toString().endsWith("pom.properties")) + .findFirst(); + if (pomProperties.isPresent()) { + Properties props = new Properties(); + props.load(Files.newInputStream(pomProperties.get())); + String artifactId = props.getProperty("artifactId"); + if (artifactId.endsWith(DEPLOYMENT)) { + artifactId = artifactId.substring(0, artifactId.length() - DEPLOYMENT.length()); + } + return new AbstractMap.SimpleEntry<>(props.getProperty("groupId"), artifactId); + } else { + return null; + } + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/AsmUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/AsmUtil.java index b1d43433b0780..e02454a4ca58b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/util/AsmUtil.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/util/AsmUtil.java @@ -1,7 +1,21 @@ package io.quarkus.deployment.util; +import static java.util.Arrays.asList; +import static org.objectweb.asm.Type.BOOLEAN_TYPE; +import static org.objectweb.asm.Type.BYTE_TYPE; +import static org.objectweb.asm.Type.CHAR_TYPE; +import static org.objectweb.asm.Type.DOUBLE_TYPE; +import static org.objectweb.asm.Type.FLOAT_TYPE; +import static org.objectweb.asm.Type.INT_TYPE; +import static org.objectweb.asm.Type.LONG_TYPE; +import static org.objectweb.asm.Type.SHORT_TYPE; +import static org.objectweb.asm.Type.VOID_TYPE; +import static org.objectweb.asm.Type.getType; + import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.function.Function; import org.jboss.jandex.ArrayType; @@ -17,9 +31,43 @@ /** * A collection of ASM and Jandex utilities. + * NOTE: this has a copy in AsmUtilCopy in arc-processor with some extra methods for knowing if we need a + * signature and getting the signature of a class. */ public class AsmUtil { + public static final List PRIMITIVES = asList( + VOID_TYPE, + BOOLEAN_TYPE, + CHAR_TYPE, + BYTE_TYPE, + SHORT_TYPE, + INT_TYPE, + FLOAT_TYPE, + LONG_TYPE, + DOUBLE_TYPE); + public static final List WRAPPERS = asList( + getType(Void.class), + getType(Boolean.class), + getType(Character.class), + getType(Byte.class), + getType(Short.class), + getType(Integer.class), + getType(Float.class), + getType(Long.class), + getType(Double.class)); + public static final Map WRAPPER_TO_PRIMITIVE = new HashMap<>(); + + static { + for (int i = 0; i < AsmUtil.PRIMITIVES.size(); i++) { + AsmUtil.WRAPPER_TO_PRIMITIVE.put(AsmUtil.WRAPPERS.get(i), AsmUtil.PRIMITIVES.get(i)); + } + } + + public static org.objectweb.asm.Type autobox(org.objectweb.asm.Type primitive) { + return WRAPPERS.get(primitive.getSort()); + } + /** * Returns the Java bytecode signature of a given Jandex MethodInfo using the given type argument mappings. 
* For example, given this method: @@ -109,6 +157,21 @@ public static String getDescriptor(Type type, Function typeArgMa return sb.toString(); } + /** + * Returns the Java bytecode signature of a given Jandex Type using the given type argument mappings. + * For example, given this type: List<T>, this will return Ljava/util/List<Ljava/lang/Integer;>; if + * your {@code typeArgMapper} contains {@code T=Ljava/lang/Integer;}. + * + * @param type the type you want the signature for. + * @param typeArgMapper a mapping between type argument names and their bytecode descriptor. + * @return a bytecode signature for that type. + */ + public static String getSignature(Type type, Function typeArgMapper) { + StringBuilder sb = new StringBuilder(); + toSignature(sb, type, typeArgMapper, false); + return sb.toString(); + } + private static void toSignature(StringBuilder sb, Type type, Function typeArgMapper, boolean erased) { switch (type.kind()) { case ARRAY: @@ -228,7 +291,7 @@ public static int getReturnInstruction(String typeDescriptor) { * specialised return instructions IRETURN, LRETURN, FRETURN, DRETURN, RETURN for primitives/void, * and ARETURN otherwise; * - * @param typeDescriptor the return Jandex Type. + * @param jandexType the return Jandex Type. * @return the correct bytecode return instruction for that return type descriptor. */ public static int getReturnInstruction(Type jandexType) { @@ -431,6 +494,43 @@ public static void unboxIfRequired(MethodVisitor mv, Type jandexType) { } } + /** + * Calls the right unboxing method for the given Jandex Type if it is a primitive. + * + * @param mv The MethodVisitor on which to visit the unboxing instructions + * @param type The Jandex Type to unbox if it is a primitive. + */ + public static void unboxIfRequired(MethodVisitor mv, org.objectweb.asm.Type type) { + if (type.getSort() <= org.objectweb.asm.Type.DOUBLE) { + switch (type.getSort()) { + case org.objectweb.asm.Type.BOOLEAN: + unbox(mv, "java/lang/Boolean", "booleanValue", "Z"); + break; + case org.objectweb.asm.Type.BYTE: + unbox(mv, "java/lang/Byte", "byteValue", "B"); + break; + case org.objectweb.asm.Type.CHAR: + unbox(mv, "java/lang/Character", "charValue", "C"); + break; + case org.objectweb.asm.Type.DOUBLE: + unbox(mv, "java/lang/Double", "doubleValue", "D"); + break; + case org.objectweb.asm.Type.FLOAT: + unbox(mv, "java/lang/Float", "floatValue", "F"); + break; + case org.objectweb.asm.Type.INT: + unbox(mv, "java/lang/Integer", "intValue", "I"); + break; + case org.objectweb.asm.Type.LONG: + unbox(mv, "java/lang/Long", "longValue", "J"); + break; + case org.objectweb.asm.Type.SHORT: + unbox(mv, "java/lang/Short", "shortValue", "S"); + break; + } + } + } + private static void unbox(MethodVisitor mv, String owner, String methodName, String returnTypeSignature) { mv.visitTypeInsn(Opcodes.CHECKCAST, owner); mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, methodName, "()" + returnTypeSignature, false); @@ -555,4 +655,22 @@ public static void printValueOnStderr(MethodVisitor mv, Runnable valuePusher) { mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream", "println", "(Ljava/lang/Object;)V", false); } + + /** + * Copy the parameter names to the given MethodVisitor, unless we don't have parameter name info + * + * @param mv the visitor to copy to + * @param method the method to copy from + */ + public static void copyParameterNames(MethodVisitor mv, MethodInfo method) { + int parameterSize = method.parameters().size(); + if (parameterSize > 0) { + // perhaps we don't have parameter 
names + if (method.parameterName(0) == null) + return; + for (int i = 0; i < parameterSize; i++) { + mv.visitParameter(method.parameterName(i), 0 /* modifiers */); + } + } + } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/ExecUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/ExecUtil.java index 910ca80dddb9a..88d7c9b52d8da 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/util/ExecUtil.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/util/ExecUtil.java @@ -15,20 +15,20 @@ public class ExecUtil { private static final Logger LOG = Logger.getLogger(ExecUtil.class); private static final Function PRINT_OUTPUT = i -> new HandleOutput(i); - private static final Function SILENT = i -> new HandleOutput(i, false); + private static final Function SILENT = i -> new HandleOutput(i, Logger.Level.DEBUG); private static class HandleOutput implements Runnable { private final InputStream is; - private final Boolean logEnabled; + private final Logger.Level logLevel; HandleOutput(InputStream is) { - this(is, true); + this(is, Logger.Level.INFO); } - HandleOutput(InputStream is, Boolean logEnabled) { + HandleOutput(InputStream is, Logger.Level logLevel) { this.is = is; - this.logEnabled = logEnabled; + this.logLevel = LOG.isEnabled(logLevel) ? logLevel : null; } @Override @@ -37,18 +37,21 @@ public void run() { BufferedReader reader = new BufferedReader(isr)) { for (String line = reader.readLine(); line != null; line = reader.readLine()) { - if (logEnabled) { - LOG.info(line); + if (logLevel != null) { + LOG.log(logLevel, line); } } } catch (IOException e) { + if (logLevel != null) { + LOG.log(logLevel, "Failed to handle output", e); + } } } } /** * Execute the specified command from within the current directory. - * + * * @param command The command * @param args The command arguments * @return true if commands where executed successfully @@ -59,7 +62,7 @@ public static boolean exec(String command, String... args) { /** * Execute the specified command from within the current directory and hide the output. - * + * * @param command The command * @param args The command arguments * @return true if commands where executed successfully @@ -70,7 +73,7 @@ public static boolean execSilent(String command, String... args) { /** * Execute the specified command from within the specified directory. - * + * * @param directory The directory * @param command The command * @param args The command arguments @@ -82,7 +85,7 @@ public static boolean exec(File directory, String command, String... args) { /** * Execute the specified command from within the specified directory and hide the output. - * + * * @param directory The directory * @param command The command * @param args The command arguments @@ -95,7 +98,7 @@ public static boolean execSilent(File directory, String command, String... args) /** * Execute the specified command from within the specified directory. * The method allows specifying an output filter that processes the command output. - * + * * @param directory The directory * @param outputFilterFunction A {@link Function} that gets an {@link InputStream} and returns an outputFilter. 
* @param command The command diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/JavaVersionUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/JavaVersionUtil.java deleted file mode 100644 index d7cac86ddc062..0000000000000 --- a/core/deployment/src/main/java/io/quarkus/deployment/util/JavaVersionUtil.java +++ /dev/null @@ -1,21 +0,0 @@ -package io.quarkus.deployment.util; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class JavaVersionUtil { - - private static final Pattern PATTERN = Pattern.compile("(?:1\\.)?(\\d+)(?:\\..*)?"); - - private static boolean IS_JAVA_11_OR_NEWER; - - static { - Matcher matcher = PATTERN.matcher(System.getProperty("java.version", "")); - IS_JAVA_11_OR_NEWER = !matcher.matches() || Integer.parseInt(matcher.group(1)) >= 11; - - } - - public static boolean isJava11OrHigher() { - return IS_JAVA_11_OR_NEWER; - } -} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/ProcessUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/ProcessUtil.java index 2f18f890adc18..c41acb8967f1c 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/util/ProcessUtil.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/util/ProcessUtil.java @@ -6,12 +6,9 @@ import java.io.InputStreamReader; import java.io.PrintStream; import java.nio.charset.StandardCharsets; -import java.util.Optional; import org.jboss.logging.Logger; -import io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled; - /** * Utility for {@link Process} related operations */ @@ -21,19 +18,19 @@ public class ProcessUtil { /** * Launches and returns a {@link Process} built from the {@link ProcessBuilder builder}. - * Before launching the process, this method checks if {@link ProcessInheritIODisabled inherit IO is disabled} - * and if so, streams both the {@code STDOUT} and {@code STDERR} of the launched process using + * Before launching the process, this method checks if inherit IO is disabled and if so, + * streams both the {@code STDOUT} and {@code STDERR} of the launched process using * {@link #streamToSysOutSysErr(Process)}. Else, it launches the process with {@link ProcessBuilder#inheritIO()} * * @param builder The process builder - * @param processInheritIODisabled Whether or not {@link java.lang.ProcessBuilder.Redirect#INHERIT} can be used for + * @param shouldRedirectIO Whether or not {@link java.lang.ProcessBuilder.Redirect#INHERIT} can be used for * launching the process * @return Returns the newly launched process * @throws IOException */ public static Process launchProcess(final ProcessBuilder builder, - final Optional processInheritIODisabled) throws IOException { - if (!processInheritIODisabled.isPresent()) { + final boolean shouldRedirectIO) throws IOException { + if (!shouldRedirectIO) { return builder.inheritIO().start(); } final Process process = builder.redirectOutput(ProcessBuilder.Redirect.PIPE) @@ -46,19 +43,19 @@ public static Process launchProcess(final ProcessBuilder builder, /** * Launches and returns a {@link Process} built from the {@link ProcessBuilder builder}. - * Before launching the process, this method checks if {@link ProcessInheritIODisabled inherit IO is disabled} + * Before launching the process, this method checks if inheritIO is disabled * and if so, streams (only) the {@code STDOUT} of the launched process using {@link #streamOutputToSysOut(Process)} * (Process)}. 
Else, it launches the process with {@link ProcessBuilder#inheritIO()} * * @param builder The process builder - * @param processInheritIODisabled Whether or not {@link java.lang.ProcessBuilder.Redirect#INHERIT} can be used for + * @param shouldRedirectIO Whether or not {@link java.lang.ProcessBuilder.Redirect#INHERIT} can be used for * launching the process * @return Returns the newly launched process * @throws IOException */ public static Process launchProcessStreamStdOut(final ProcessBuilder builder, - final Optional processInheritIODisabled) throws IOException { - if (!processInheritIODisabled.isPresent()) { + boolean shouldRedirectIO) throws IOException { + if (!shouldRedirectIO) { return builder.inheritIO().start(); } final Process process = builder.redirectOutput(ProcessBuilder.Redirect.PIPE) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/UriNormalizationUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/UriNormalizationUtil.java new file mode 100644 index 0000000000000..3a629bf968b16 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/util/UriNormalizationUtil.java @@ -0,0 +1,115 @@ +package io.quarkus.deployment.util; + +import java.net.URI; +import java.net.URISyntaxException; + +/** + * Common URI path resolution + */ +public class UriNormalizationUtil { + private UriNormalizationUtil() { + } + + /** + * Create a URI path from a string. The specified path can not contain + * relative {@literal ..} segments or {@literal %} characters. + *

+ * Examples: + *

+ * + * + * @param path String to convert into a URI + * @param trailingSlash true if resulting URI must end with a '/' + * @throws IllegalArgumentException if the path contains invalid characters or path segments. + */ + public static URI toURI(String path, boolean trailingSlash) { + try { + // replace inbound // with / + path = path.replaceAll("//", "/"); + // remove trailing slash if result shouldn't have one + if (!trailingSlash && path.endsWith("/")) { + path = path.substring(0, path.length() - 1); + } + + if (path.contains("..") || path.contains("%")) { + throw new IllegalArgumentException("Specified path can not contain '..' or '%'. Path was " + path); + } + URI uri = new URI(path).normalize(); + if (uri.getPath().equals("")) { + return trailingSlash ? new URI("/") : new URI(""); + } else if (trailingSlash && !path.endsWith("/")) { + uri = new URI(uri.getPath() + "/"); + } + return uri; + } catch (URISyntaxException e) { + throw new IllegalArgumentException("Specified path is an invalid URI. Path was " + path, e); + } + } + + /** + * Resolve a string path against a URI base. The specified path can not contain + * relative {@literal ..} segments or {@literal %} characters. + * + * Relative paths will be resolved against the specified base URI. + * Absolute paths will be normalized and returned. + *

+ * Examples:
+ * <ul>
+ * <li>{@code normalizeWithBase(new URI("/"), "example", true)}
+ * will return a URI with path {@literal /example/}</li>
+ * <li>{@code normalizeWithBase(new URI("/"), "example", false)}
+ * will return a URI with an empty path {@literal /example}</li>
+ * <li>{@code normalizeWithBase(new URI("/"), "/example", true)}
+ * will return a URI with path {@literal /example/}</li>
+ * <li>{@code normalizeWithBase(new URI("/"), "/example", false)}
+ * will return a URI with an empty path {@literal /example}</li>
+ *
+ * <li>{@code normalizeWithBase(new URI("/prefix/"), "example", true)}
+ * will return a URI with path {@literal /prefix/example/}</li>
+ * <li>{@code normalizeWithBase(new URI("/prefix/"), "example", false)}
+ * will return a URI with an empty path {@literal /prefix/example}</li>
+ * <li>{@code normalizeWithBase(new URI("/prefix/"), "/example", true)}
+ * will return a URI with path {@literal /example/}</li>
+ * <li>{@code normalizeWithBase(new URI("/prefix/"), "/example", false)}
+ * will return a URI with an empty path {@literal /example}</li>
+ *
+ * <li>{@code normalizeWithBase(new URI("foo/"), "example", true)}
+ * will return a URI with path {@literal foo/example/}</li>
+ * <li>{@code normalizeWithBase(new URI("foo/"), "example", false)}
+ * will return a URI with an empty path {@literal foo/example}</li>
+ * <li>{@code normalizeWithBase(new URI("foo/"), "/example", true)}
+ * will return a URI with path {@literal /example/}</li>
+ * <li>{@code normalizeWithBase(new URI("foo/"), "/example", false)}
+ * will return a URI with an empty path {@literal /example}</li>
+ * </ul>
+ * + * @param base URI to resolve relative paths. Use {@link #toURI(String, boolean)} to construct this parameter. + * + * @param segment Relative or absolute path + * @param trailingSlash true if resulting URI must end with a '/' + * @throws IllegalArgumentException if the path contains invalid characters or path segments. + */ + public static URI normalizeWithBase(URI base, String segment, boolean trailingSlash) { + if (segment == null || segment.trim().isEmpty()) { + return base; + } + URI segmentUri = toURI(segment, trailingSlash); + URI resolvedUri = base.resolve(segmentUri); + return resolvedUri; + } + + public static String relativize(String rootPath, String leafPath) { + if (leafPath.startsWith(rootPath)) { + return leafPath.substring(rootPath.length()); + } + + return null; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/util/WebJarUtil.java b/core/deployment/src/main/java/io/quarkus/deployment/util/WebJarUtil.java new file mode 100644 index 0000000000000..18a29e5ef0e54 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/util/WebJarUtil.java @@ -0,0 +1,481 @@ +package io.quarkus.deployment.util; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; +import java.nio.charset.StandardCharsets; +import java.nio.file.DirectoryStream; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Arrays; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Scanner; +import java.util.Set; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +import org.eclipse.microprofile.config.Config; +import org.eclipse.microprofile.config.ConfigProvider; +import org.jboss.logging.Logger; + +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppDependency; +import io.quarkus.bootstrap.model.PathsCollection; +import io.quarkus.bootstrap.util.IoUtils; +import io.quarkus.builder.Version; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; +import io.quarkus.runtime.LaunchMode; +import io.smallrye.common.io.jar.JarFiles; + +/** + * Utility for Web resource related operations + */ +public class WebJarUtil { + + private static final Logger LOG = Logger.getLogger(WebJarUtil.class); + + private static final String TMP_DIR = System.getProperty("java.io.tmpdir"); + private static final String CUSTOM_MEDIA_FOLDER = "META-INF/branding/"; + private static final List IGNORE_LIST = Arrays.asList("logo.png", "favicon.ico", "style.css"); + private static final String CSS = ".css"; + private static final String SNAPSHOT_VERSION = "-SNAPSHOT"; + + private WebJarUtil() { + } + + public static void hotReloadBrandingChanges(CurateOutcomeBuildItem curateOutcomeBuildItem, + LaunchModeBuildItem launchMode, + AppArtifact resourcesArtifact, + Set hotReloadChanges) throws IOException { + + hotReloadBrandingChanges(curateOutcomeBuildItem, launchMode, resourcesArtifact, hotReloadChanges, true); + } + + public static void hotReloadBrandingChanges(CurateOutcomeBuildItem 
curateOutcomeBuildItem, + LaunchModeBuildItem launchMode, + AppArtifact resourcesArtifact, + Set hotReloadChanges, + boolean useDefaultQuarkusBranding) throws IOException { + + if (launchMode.getLaunchMode().equals(LaunchMode.DEVELOPMENT) && hotReloadChanges != null + && !hotReloadChanges.isEmpty()) { + for (String changedResource : hotReloadChanges) { + if (changedResource.startsWith(CUSTOM_MEDIA_FOLDER)) { + ClassLoader classLoader = WebJarUtil.class.getClassLoader(); + AppArtifact userApplication = curateOutcomeBuildItem.getEffectiveModel().getAppArtifact(); + String fileName = changedResource.replace(CUSTOM_MEDIA_FOLDER, ""); + // a branding file has changed ! + String modulename = getModuleOverrideName(resourcesArtifact, fileName); + if (IGNORE_LIST.contains(fileName) + && isOverride(userApplication.getPaths(), classLoader, fileName, modulename)) { + Path deploymentPath = createResourcesDirectory(userApplication, resourcesArtifact); + Path filePath = deploymentPath.resolve(fileName); + try (InputStream override = insertVariables(userApplication, + getOverride(userApplication.getPaths(), classLoader, + fileName, modulename, useDefaultQuarkusBranding), + fileName)) { + if (override != null) { + createFile(override, filePath); + } + } + } + } + } + } + } + + public static Path copyResourcesForDevOrTest(CurateOutcomeBuildItem curateOutcomeBuildItem, + LaunchModeBuildItem launchMode, + AppArtifact resourcesArtifact, + String rootFolderInJar) + throws IOException { + return copyResourcesForDevOrTest(curateOutcomeBuildItem, launchMode, resourcesArtifact, rootFolderInJar, true); + } + + public static Path copyResourcesForDevOrTest(CurateOutcomeBuildItem curateOutcomeBuildItem, + LaunchModeBuildItem launchMode, + AppArtifact resourcesArtifact, + String rootFolderInJar, + boolean useDefaultQuarkusBranding) + throws IOException { + + rootFolderInJar = normalizeRootFolderInJar(rootFolderInJar); + AppArtifact userApplication = curateOutcomeBuildItem.getEffectiveModel().getAppArtifact(); + + Path deploymentPath = createResourcesDirectory(userApplication, resourcesArtifact); + + // Clean if not in dev mode or if the resources jar is a snapshot version + if (!launchMode.getLaunchMode().equals(LaunchMode.DEVELOPMENT) + || resourcesArtifact.getVersion().contains(SNAPSHOT_VERSION)) { + IoUtils.createOrEmptyDir(deploymentPath); + } + + if (isEmpty(deploymentPath)) { + ClassLoader classLoader = WebJarUtil.class.getClassLoader(); + for (Path p : resourcesArtifact.getPaths()) { + File artifactFile = p.toFile(); + if (artifactFile.isFile()) { + // case of a jar file + try (JarFile jarFile = JarFiles.create(artifactFile)) { + Enumeration entries = jarFile.entries(); + while (entries.hasMoreElements()) { + JarEntry entry = entries.nextElement(); + if (entry.getName().startsWith(rootFolderInJar)) { + String fileName = entry.getName().replace(rootFolderInJar, ""); + Path filePath = deploymentPath.resolve(fileName); + if (entry.isDirectory()) { + Files.createDirectories(filePath); + } else { + try (InputStream inputStream = insertVariables(userApplication, + jarFile.getInputStream(entry), fileName)) { + String modulename = getModuleOverrideName(resourcesArtifact, fileName); + if (IGNORE_LIST.contains(fileName) + && isOverride(userApplication.getPaths(), classLoader, fileName, modulename)) { + try (InputStream override = insertVariables(userApplication, + getOverride(userApplication.getPaths(), classLoader, + fileName, modulename, useDefaultQuarkusBranding), + fileName)) { + if (override == null) { + 
createFile(inputStream, filePath); + } else { + createFile(override, filePath); // Override (either developer supplied or Quarkus) + } + } + } else { + createFile(inputStream, filePath); + } + } + } + } + } + } + } else { + // case of a directory + Path rootFolderToCopy = p.resolve(rootFolderInJar); + if (!Files.isDirectory(rootFolderToCopy)) { + continue; + } + + Files.walkFileTree(rootFolderToCopy, new SimpleFileVisitor() { + @Override + public FileVisitResult preVisitDirectory(final Path dir, + final BasicFileAttributes attrs) throws IOException { + Files.createDirectories(deploymentPath.resolve(rootFolderToCopy.relativize(dir))); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(final Path file, + final BasicFileAttributes attrs) throws IOException { + String fileName = rootFolderToCopy.relativize(file).toString(); + Path targetFilePath = deploymentPath.resolve(rootFolderToCopy.relativize(file)); + + String modulename = getModuleOverrideName(resourcesArtifact, fileName); + if (IGNORE_LIST.contains(fileName) + && isOverride(userApplication.getPaths(), classLoader, fileName, modulename)) { + try (InputStream override = insertVariables(userApplication, + getOverride(userApplication.getPaths(), classLoader, + fileName, modulename, useDefaultQuarkusBranding), + fileName)) { + if (override == null) { + copyFile(userApplication, file, fileName, targetFilePath); + } else { + createFile(override, targetFilePath); // Override (either developer supplied or Quarkus) + } + } + } else { + copyFile(userApplication, file, fileName, targetFilePath); + } + + return FileVisitResult.CONTINUE; + } + }); + } + } + } + return deploymentPath; + } + + public static Map copyResourcesForProduction(CurateOutcomeBuildItem curateOutcomeBuildItem, + AppArtifact artifact, + String rootFolderInJar) throws IOException { + return copyResourcesForProduction(curateOutcomeBuildItem, artifact, rootFolderInJar, true); + } + + public static Map copyResourcesForProduction(CurateOutcomeBuildItem curateOutcomeBuildItem, + AppArtifact artifact, + String rootFolderInJar, + boolean useDefaultQuarkusBranding) throws IOException { + rootFolderInJar = normalizeRootFolderInJar(rootFolderInJar); + AppArtifact userApplication = curateOutcomeBuildItem.getEffectiveModel().getAppArtifact(); + + Map map = new HashMap<>(); + //we are including in a production artifact + //just stick the files in the generated output + //we could do this for dev mode as well but then we need to extract them every time + + ClassLoader classLoader = WebJarUtil.class.getClassLoader(); + for (Path p : artifact.getPaths()) { + File artifactFile = p.toFile(); + try (JarFile jarFile = JarFiles.create(artifactFile)) { + Enumeration entries = jarFile.entries(); + while (entries.hasMoreElements()) { + JarEntry entry = entries.nextElement(); + if (entry.getName().startsWith(rootFolderInJar) && !entry.isDirectory()) { + String filename = entry.getName().replace(rootFolderInJar, ""); + try (InputStream inputStream = insertVariables(userApplication, jarFile.getInputStream(entry), + filename)) { + byte[] content = null; + String modulename = getModuleOverrideName(artifact, filename); + if (IGNORE_LIST.contains(filename) + && isOverride(userApplication.getPaths(), classLoader, filename, modulename)) { + try (InputStream resourceAsStream = insertVariables(userApplication, + getOverride(userApplication.getPaths(), classLoader, + filename, modulename, useDefaultQuarkusBranding), + filename)) { + if (resourceAsStream != null) { + content = 
IoUtil.readBytes(resourceAsStream); // Override (either developer supplied or Quarkus) + } + } + } + if (content == null) { + content = FileUtil.readFileContents(inputStream); + } + + map.put(filename, content); + } + } + } + } + } + return map; + } + + public static void updateFile(Path original, byte[] newContent) throws IOException { + try (ByteArrayInputStream bais = new ByteArrayInputStream(newContent)) { + createFile(bais, original); + } + } + + public static void updateUrl(Path original, String path, String lineStartsWith, String format) throws IOException { + String content = new String(Files.readAllBytes(original), StandardCharsets.UTF_8); + String result = updateUrl(content, path, lineStartsWith, format); + if (result != null && !result.equals(content)) { + Files.write(original, result.getBytes(StandardCharsets.UTF_8)); + } + } + + public static String updateUrl(String original, String path, String lineStartsWith, String format) { + try (Scanner scanner = new Scanner(original)) { + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + if (line.trim().startsWith(lineStartsWith)) { + String newLine = String.format(format, path); + return original.replace(line.trim(), newLine); + } + } + } + + return original; + } + + public static AppArtifact getAppArtifact(CurateOutcomeBuildItem curateOutcomeBuildItem, String groupId, String artifactId) { + for (AppDependency dep : curateOutcomeBuildItem.getEffectiveModel().getFullDeploymentDeps()) { + if (dep.getArtifact().getArtifactId().equals(artifactId) + && dep.getArtifact().getGroupId().equals(groupId)) { + return dep.getArtifact(); + } + } + throw new RuntimeException("Could not find artifact " + groupId + ":" + artifactId + + " among the application dependencies"); + } + + private static void copyFile(AppArtifact appArtifact, Path file, String fileName, Path targetFilePath) throws IOException { + InputStream providedContent = pathToStream(file).orElse(null); + if (providedContent != null) { + Files.copy(insertVariables(appArtifact, providedContent, fileName), targetFilePath); + } + } + + private static String getModuleOverrideName(AppArtifact artifact, String filename) { + String type = filename.substring(filename.lastIndexOf(".")); + return artifact.getArtifactId() + type; + } + + private static InputStream getOverride(PathsCollection paths, ClassLoader classLoader, String filename, String modulename, + boolean useDefaultQuarkusBranding) + throws IOException { + + // First check if the developer supplied the files + InputStream overrideStream = getCustomOverride(paths, filename, modulename); + if (overrideStream == null && useDefaultQuarkusBranding) { + // Else check if Quarkus has a default branding + overrideStream = getQuarkusOverride(classLoader, filename, modulename); + } + return overrideStream; + } + + private static InputStream insertVariables(AppArtifact appArtifact, InputStream is, String filename) throws IOException { + // Allow replacement of certain values in css + if (filename.endsWith(CSS)) { + Config c = ConfigProvider.getConfig(); + String contents = new String(IoUtil.readBytes(is)); + contents = contents.replace("{applicationName}", + c.getOptionalValue("quarkus.application.name", String.class) + .orElse(appArtifact.getArtifactId())); + + contents = contents.replace("{applicationVersion}", + c.getOptionalValue("quarkus.application.version", String.class) + .orElse(appArtifact.getVersion())); + + contents = contents.replace("{quarkusVersion}", Version.getVersion()); + is = new 
ByteArrayInputStream(contents.getBytes()); + } + return is; + } + + private static InputStream getCustomOverride(PathsCollection paths, String filename, String modulename) { + // Check if the developer supplied the files + Path customOverridePath = getCustomOverridePath(paths, filename, modulename); + if (customOverridePath != null) { + return pathToStream(customOverridePath).orElse(null); + } + return null; + } + + private static Path getCustomOverridePath(PathsCollection paths, String filename, String modulename) { + + // First check if the developer supplied the files + Iterator iterator = paths.iterator(); + while (iterator.hasNext()) { + Path root = iterator.next(); + Path customModuleOverride = root.resolve(CUSTOM_MEDIA_FOLDER + modulename); + if (Files.exists(customModuleOverride)) { + return customModuleOverride; + } + Path customOverride = root.resolve(CUSTOM_MEDIA_FOLDER + filename); + if (Files.exists(customOverride)) { + return customOverride; + } + } + return null; + } + + private static InputStream getQuarkusOverride(ClassLoader classLoader, String filename, String modulename) { + // Allow quarkus per module override + boolean quarkusModuleOverride = fileExistInClasspath(classLoader, CUSTOM_MEDIA_FOLDER + modulename); + if (quarkusModuleOverride) { + return classLoader.getResourceAsStream(CUSTOM_MEDIA_FOLDER + modulename); + } + boolean quarkusOverride = fileExistInClasspath(classLoader, CUSTOM_MEDIA_FOLDER + filename); + if (quarkusOverride) { + return classLoader.getResourceAsStream(CUSTOM_MEDIA_FOLDER + filename); + } + return null; + } + + private static boolean isOverride(PathsCollection paths, ClassLoader classLoader, String filename, String modulename) { + // Check if quarkus override this. + return isQuarkusOverride(classLoader, filename, modulename) || isCustomOverride(paths, filename, modulename); + } + + private static boolean isQuarkusOverride(ClassLoader classLoader, String filename, String modulename) { + // Check if quarkus override this. 
+ return fileExistInClasspath(classLoader, CUSTOM_MEDIA_FOLDER + modulename) + || fileExistInClasspath(classLoader, CUSTOM_MEDIA_FOLDER + filename); + } + + private static boolean isCustomOverride(PathsCollection paths, String filename, String modulename) { + Iterator iterator = paths.iterator(); + while (iterator.hasNext()) { + Path root = iterator.next(); + Path customModuleOverride = root.resolve(CUSTOM_MEDIA_FOLDER + modulename); + if (Files.exists(customModuleOverride)) { + return true; + } + Path customOverride = root.resolve(CUSTOM_MEDIA_FOLDER + filename); + return Files.exists(customOverride); + } + + return false; + } + + private static boolean fileExistInClasspath(ClassLoader classLoader, String filename) { + URL u = classLoader.getResource(filename); + return u != null; + } + + private static Optional pathToStream(Path path) { + if (Files.exists(path)) { + try { + return Optional.of(Files.newInputStream(path)); + } catch (IOException ex) { + LOG.warn("Could not read override file [" + path + "] - " + ex.getMessage()); + } + } + return Optional.empty(); + } + + private static void createFile(InputStream source, Path targetFile) throws IOException { + FileLock lock = null; + FileOutputStream fos = null; + try { + fos = new FileOutputStream(targetFile.toString()); + FileChannel channel = fos.getChannel(); + lock = channel.tryLock(); + if (lock != null) { + IoUtils.copy(fos, source); + } + } finally { + if (lock != null) { + lock.release(); + } + if (fos != null) { + fos.close(); + } + } + } + + public static Path createResourcesDirectory(AppArtifact userApplication, AppArtifact resourcesArtifact) { + try { + Path path = Paths.get(TMP_DIR, "quarkus", userApplication.getGroupId(), userApplication.getArtifactId(), + resourcesArtifact.getGroupId(), resourcesArtifact.getArtifactId(), resourcesArtifact.getVersion()); + + if (!Files.exists(path)) { + Files.createDirectories(path); + } + return path; + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } + + private static boolean isEmpty(final Path directory) throws IOException { + try (DirectoryStream dirStream = Files.newDirectoryStream(directory)) { + return !dirStream.iterator().hasNext(); + } + } + + private static String normalizeRootFolderInJar(String rootFolderInJar) { + if (rootFolderInJar.endsWith("/")) { + return rootFolderInJar; + } + + return rootFolderInJar + "/"; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/AugmentActionImpl.java b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/AugmentActionImpl.java index 7679468d0b543..5b41d3c89cf6c 100644 --- a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/AugmentActionImpl.java +++ b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/AugmentActionImpl.java @@ -1,16 +1,24 @@ package io.quarkus.runner.bootstrap; import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.Properties; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Function; import java.util.stream.Collectors; import 
org.eclipse.microprofile.config.spi.ConfigProviderResolver; @@ -21,8 +29,10 @@ import io.quarkus.bootstrap.app.ArtifactResult; import io.quarkus.bootstrap.app.AugmentAction; import io.quarkus.bootstrap.app.AugmentResult; +import io.quarkus.bootstrap.app.ClassChangeInformation; import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.classloading.ClassLoaderEventListener; import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.builder.BuildChain; import io.quarkus.builder.BuildChainBuilder; @@ -32,7 +42,6 @@ import io.quarkus.deployment.ExtensionLoader; import io.quarkus.deployment.QuarkusAugmentor; import io.quarkus.deployment.builditem.ApplicationClassNameBuildItem; -import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem; import io.quarkus.deployment.builditem.ConfigDescriptionBuildItem; import io.quarkus.deployment.builditem.GeneratedClassBuildItem; import io.quarkus.deployment.builditem.GeneratedFileSystemResourceHandledBuildItem; @@ -42,9 +51,12 @@ import io.quarkus.deployment.builditem.MainClassBuildItem; import io.quarkus.deployment.builditem.RawCommandLineArgumentsBuildItem; import io.quarkus.deployment.builditem.ShutdownContextBuildItem; +import io.quarkus.deployment.builditem.TransformedClassesBuildItem; import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; +import io.quarkus.deployment.pkg.builditem.BuildSystemTargetBuildItem; import io.quarkus.deployment.pkg.builditem.JarBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageBuildItem; +import io.quarkus.dev.spi.DevModeType; import io.quarkus.runtime.LaunchMode; import io.quarkus.runtime.configuration.ProfileManager; @@ -56,13 +68,16 @@ public class AugmentActionImpl implements AugmentAction { private static final Logger log = Logger.getLogger(AugmentActionImpl.class); private static final Class[] NON_NORMAL_MODE_OUTPUTS = { GeneratedClassBuildItem.class, - GeneratedResourceBuildItem.class, BytecodeTransformerBuildItem.class, ApplicationClassNameBuildItem.class, - MainClassBuildItem.class, GeneratedFileSystemResourceHandledBuildItem.class }; + GeneratedResourceBuildItem.class, ApplicationClassNameBuildItem.class, + MainClassBuildItem.class, GeneratedFileSystemResourceHandledBuildItem.class, + TransformedClassesBuildItem.class }; private final QuarkusBootstrap quarkusBootstrap; private final CuratedApplication curatedApplication; private final LaunchMode launchMode; + private final DevModeType devModeType; private final List> chainCustomizers; + private final List classLoadListeners; /** * A map that is shared between all re-runs of the same augment instance. This is @@ -72,15 +87,105 @@ public class AugmentActionImpl implements AugmentAction { private final Map, Object> reloadContext = new ConcurrentHashMap<>(); public AugmentActionImpl(CuratedApplication curatedApplication) { - this(curatedApplication, Collections.emptyList()); + this(curatedApplication, Collections.emptyList(), Collections.emptyList()); } + /** + * Leaving this here for backwards compatibility, even though this is only internal. 
+ * + * @Deprecated use one of the other constructors + */ + @Deprecated public AugmentActionImpl(CuratedApplication curatedApplication, List> chainCustomizers) { + this(curatedApplication, chainCustomizers, Collections.emptyList()); + } + + public AugmentActionImpl(CuratedApplication curatedApplication, List> chainCustomizers, + List classLoadListeners) { this.quarkusBootstrap = curatedApplication.getQuarkusBootstrap(); this.curatedApplication = curatedApplication; this.chainCustomizers = chainCustomizers; - this.launchMode = quarkusBootstrap.getMode() == QuarkusBootstrap.Mode.PROD ? LaunchMode.NORMAL - : quarkusBootstrap.getMode() == QuarkusBootstrap.Mode.TEST ? LaunchMode.TEST : LaunchMode.DEVELOPMENT; + this.classLoadListeners = classLoadListeners; + LaunchMode launchMode; + DevModeType devModeType; + switch (quarkusBootstrap.getMode()) { + case DEV: + launchMode = LaunchMode.DEVELOPMENT; + devModeType = DevModeType.LOCAL; + break; + case PROD: + launchMode = LaunchMode.NORMAL; + devModeType = null; + break; + case TEST: + launchMode = LaunchMode.TEST; + devModeType = null; + break; + case REMOTE_DEV_CLIENT: + //this seems a bit counter intuitive, but the remote dev client just keeps a production + //app up to date and ships it to the remote side, this allows the remote side to be fully up + //to date even if the process is restarted + launchMode = LaunchMode.NORMAL; + devModeType = DevModeType.REMOTE_LOCAL_SIDE; + break; + case REMOTE_DEV_SERVER: + launchMode = LaunchMode.DEVELOPMENT; + devModeType = DevModeType.REMOTE_SERVER_SIDE; + break; + default: + throw new RuntimeException("Unknown launch mode " + quarkusBootstrap.getMode()); + } + this.launchMode = launchMode; + this.devModeType = devModeType; + } + + @Override + public void performCustomBuild(String resultHandler, Object context, String... 
finalOutputs) { + ClassLoader classLoader = curatedApplication.createDeploymentClassLoader(); + Class[] targets = Arrays.stream(finalOutputs) + .map(new Function>() { + @Override + public Class apply(String s) { + try { + return (Class) Class.forName(s, false, classLoader); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + }).toArray(Class[]::new); + BuildResult result = runAugment(true, Collections.emptySet(), null, classLoader, targets); + + String debugSourcesDir = BootstrapDebug.DEBUG_SOURCES_DIR; + if (debugSourcesDir != null) { + for (GeneratedClassBuildItem i : result.consumeMulti(GeneratedClassBuildItem.class)) { + try { + if (i.getSource() != null) { + File debugPath = new File(debugSourcesDir); + if (!debugPath.exists()) { + debugPath.mkdir(); + } + File sourceFile = new File(debugPath, i.getName() + ".zig"); + sourceFile.getParentFile().mkdirs(); + Files.write(sourceFile.toPath(), i.getSource().getBytes(StandardCharsets.UTF_8), + StandardOpenOption.CREATE); + log.infof("Wrote source: %s", sourceFile.getAbsolutePath()); + } else { + log.infof("Source not available: %s", i.getName()); + } + } catch (Exception t) { + log.errorf(t, "Failed to write debug source file: %s", i.getName()); + } + } + } + try { + BiConsumer consumer = (BiConsumer) Class + .forName(resultHandler, false, classLoader) + .getConstructor().newInstance(); + consumer.accept(context, result); + } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | ClassNotFoundException + | InvocationTargetException e) { + throw new RuntimeException(e); + } } @Override @@ -89,7 +194,7 @@ public AugmentResult createProductionApplication() { throw new IllegalStateException("Can only create a production application when using NORMAL launch mode"); } ClassLoader classLoader = curatedApplication.createDeploymentClassLoader(); - BuildResult result = runAugment(true, Collections.emptySet(), classLoader, ArtifactResultBuildItem.class); + BuildResult result = runAugment(true, Collections.emptySet(), null, classLoader, ArtifactResultBuildItem.class); String debugSourcesDir = BootstrapDebug.DEBUG_SOURCES_DIR; if (debugSourcesDir != null) { @@ -116,37 +221,68 @@ public AugmentResult createProductionApplication() { JarBuildItem jarBuildItem = result.consumeOptional(JarBuildItem.class); NativeImageBuildItem nativeImageBuildItem = result.consumeOptional(NativeImageBuildItem.class); - return new AugmentResult(result.consumeMulti(ArtifactResultBuildItem.class).stream() - .map(a -> new ArtifactResult(a.getPath(), a.getType(), a.getAdditionalPaths())) + List artifactResultBuildItems = result.consumeMulti(ArtifactResultBuildItem.class); + BuildSystemTargetBuildItem buildSystemTargetBuildItem = result.consume(BuildSystemTargetBuildItem.class); + + // this depends on the fact that the order in which we can obtain MultiBuildItems is the same as they are produced + // we want to write result of the final artifact created + ArtifactResultBuildItem lastResult = artifactResultBuildItems.get(artifactResultBuildItems.size() - 1); + writeArtifactResultMetadataFile(buildSystemTargetBuildItem, lastResult); + + return new AugmentResult(artifactResultBuildItems.stream() + .map(a -> new ArtifactResult(a.getPath(), a.getType(), a.getMetadata())) .collect(Collectors.toList()), jarBuildItem != null ? jarBuildItem.toJarResult() : null, nativeImageBuildItem != null ? 
nativeImageBuildItem.getPath() : null); } + private void writeArtifactResultMetadataFile(BuildSystemTargetBuildItem outputTargetBuildItem, + ArtifactResultBuildItem lastResult) { + Path quarkusArtifactMetadataPath = outputTargetBuildItem.getOutputDirectory().resolve("quarkus-artifact.properties"); + Properties properties = new Properties(); + properties.put("type", lastResult.getType()); + if (lastResult.getPath() != null) { + properties.put("path", outputTargetBuildItem.getOutputDirectory().relativize(lastResult.getPath()).toString()); + } + Map metadata = lastResult.getMetadata(); + if (metadata != null) { + for (Map.Entry entry : metadata.entrySet()) { + if (entry.getValue() instanceof String) { + properties.put("metadata." + entry.getKey(), entry.getValue()); + } + } + } + try (FileOutputStream fos = new FileOutputStream(quarkusArtifactMetadataPath.toFile())) { + properties.store(fos, "Generated by Quarkus - Do not edit manually"); + } catch (IOException e) { + log.debug("Unable to write artifact result metadata file", e); + } + } + @Override public StartupActionImpl createInitialRuntimeApplication() { if (launchMode == LaunchMode.NORMAL) { throw new IllegalStateException("Cannot launch a runtime application with NORMAL launch mode"); } ClassLoader classLoader = curatedApplication.createDeploymentClassLoader(); - @SuppressWarnings("unchecked") - BuildResult result = runAugment(true, Collections.emptySet(), classLoader, NON_NORMAL_MODE_OUTPUTS); - - return new StartupActionImpl(curatedApplication, result, classLoader); + BuildResult result = runAugment(true, Collections.emptySet(), null, classLoader, NON_NORMAL_MODE_OUTPUTS); + return new StartupActionImpl(curatedApplication, result); } @Override - public StartupActionImpl reloadExistingApplication(boolean hasStartedSuccessfully, Set changedResources) { + public StartupActionImpl reloadExistingApplication(boolean hasStartedSuccessfully, Set changedResources, + ClassChangeInformation classChangeInformation) { if (launchMode != LaunchMode.DEVELOPMENT) { throw new IllegalStateException("Only application with launch mode DEVELOPMENT can restart"); } ClassLoader classLoader = curatedApplication.createDeploymentClassLoader(); @SuppressWarnings("unchecked") - BuildResult result = runAugment(!hasStartedSuccessfully, changedResources, classLoader, NON_NORMAL_MODE_OUTPUTS); + BuildResult result = runAugment(!hasStartedSuccessfully, changedResources, classChangeInformation, classLoader, + NON_NORMAL_MODE_OUTPUTS); - return new StartupActionImpl(curatedApplication, result, classLoader); + return new StartupActionImpl(curatedApplication, result); } /** @@ -167,7 +303,9 @@ public BuildResult runCustomAction(Consumer chainBuild, Consu final BuildChainBuilder chainBuilder = BuildChain.builder(); chainBuilder.setClassLoader(classLoader); - ExtensionLoader.loadStepsFrom(classLoader).accept(chainBuilder); + ExtensionLoader.loadStepsFrom(classLoader, new Properties(), + curatedApplication.getAppModel().getPlatformProperties(), launchMode, devModeType, null) + .accept(chainBuilder); chainBuilder.loadProviders(classLoader); for (Consumer c : chainCustomizers) { @@ -184,7 +322,8 @@ public BuildResult runCustomAction(Consumer chainBuild, Consu BuildChain chain = chainBuilder .build(); BuildExecutionBuilder execBuilder = chain.createExecutionBuilder("main") - .produce(new LaunchModeBuildItem(launchMode)) + .produce(new LaunchModeBuildItem(launchMode, + devModeType == null ? 
Optional.empty() : Optional.of(devModeType))) .produce(new ShutdownContextBuildItem()) .produce(new RawCommandLineArgumentsBuildItem()) .produce(new LiveReloadBuildItem()); @@ -203,7 +342,8 @@ public BuildResult runCustomAction(Consumer chainBuild, Consu } } - private BuildResult runAugment(boolean firstRun, Set changedResources, ClassLoader deploymentClassLoader, + private BuildResult runAugment(boolean firstRun, Set changedResources, + ClassChangeInformation classChangeInformation, ClassLoader deploymentClassLoader, Class... finalOutputs) { ClassLoader old = Thread.currentThread().getContextClassLoader(); try { @@ -225,11 +365,14 @@ private BuildResult runAugment(boolean firstRun, Set changedResources, C } builder.setLaunchMode(launchMode); + builder.setDevModeType(devModeType); builder.setRebuild(quarkusBootstrap.isRebuild()); if (firstRun) { - builder.setLiveReloadState(new LiveReloadBuildItem(false, Collections.emptySet(), reloadContext)); + builder.setLiveReloadState( + new LiveReloadBuildItem(false, Collections.emptySet(), reloadContext, classChangeInformation)); } else { - builder.setLiveReloadState(new LiveReloadBuildItem(true, changedResources, reloadContext)); + builder.setLiveReloadState( + new LiveReloadBuildItem(true, changedResources, reloadContext, classChangeInformation)); } for (AdditionalDependency i : quarkusBootstrap.getAdditionalApplicationArchives()) { //this gets added to the class path either way diff --git a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/GenerateConfigTask.java b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/GenerateConfigTask.java index acace794fcb34..84a4e1e512277 100644 --- a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/GenerateConfigTask.java +++ b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/GenerateConfigTask.java @@ -41,7 +41,7 @@ public void accept(CuratedApplication application, Map stringObj try { Path temp = Files.createTempDirectory("empty"); try { - AugmentActionImpl augmentAction = new AugmentActionImpl(application, Collections.emptyList()); + AugmentActionImpl augmentAction = new AugmentActionImpl(application); BuildResult buildResult = augmentAction.runCustomAction(new Consumer() { @Override public void accept(BuildChainBuilder chainBuilder) { diff --git a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java index 948d896301652..609c043335650 100644 --- a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java +++ b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java @@ -14,6 +14,8 @@ public class RunningQuarkusApplicationImpl implements RunningQuarkusApplication private final Closeable closeTask; private final ClassLoader classLoader; + private boolean closing; + public RunningQuarkusApplicationImpl(Closeable closeTask, ClassLoader classLoader) { this.closeTask = closeTask; this.classLoader = classLoader; @@ -26,7 +28,10 @@ public ClassLoader getClassLoader() { @Override public void close() throws Exception { - closeTask.close(); + if (!closing) { + closing = true; + closeTask.close(); + } } @Override diff --git a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java index 6fed3f94104a7..b2ca68c4ab709 100644 --- 
a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java +++ b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java @@ -9,20 +9,15 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.StandardOpenOption; -import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.function.BiFunction; import java.util.function.Consumer; -import java.util.function.Predicate; import org.jboss.logging.Logger; -import org.objectweb.asm.ClassVisitor; import io.quarkus.bootstrap.BootstrapDebug; import io.quarkus.bootstrap.app.CuratedApplication; @@ -32,12 +27,11 @@ import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.builder.BuildResult; import io.quarkus.deployment.builditem.ApplicationClassNameBuildItem; -import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem; import io.quarkus.deployment.builditem.GeneratedClassBuildItem; import io.quarkus.deployment.builditem.GeneratedResourceBuildItem; import io.quarkus.deployment.builditem.MainClassBuildItem; +import io.quarkus.deployment.builditem.TransformedClassesBuildItem; import io.quarkus.deployment.configuration.RunTimeConfigurationGenerator; -import io.quarkus.deployment.index.ConstPoolScanner; import io.quarkus.dev.appstate.ApplicationStateNotification; import io.quarkus.runtime.Quarkus; @@ -49,35 +43,30 @@ public class StartupActionImpl implements StartupAction { private final BuildResult buildResult; private final QuarkusClassLoader runtimeClassLoader; - public StartupActionImpl(CuratedApplication curatedApplication, BuildResult buildResult, - ClassLoader deploymentClassLoader) { + public StartupActionImpl(CuratedApplication curatedApplication, BuildResult buildResult) { this.curatedApplication = curatedApplication; this.buildResult = buildResult; Set eagerClasses = new HashSet<>(); - Map> transformerPredicates = new HashMap<>(); - Map>> bytecodeTransformers = extractTransformers( - eagerClasses, transformerPredicates); + Map transformedClasses = extractTransformers(eagerClasses); QuarkusClassLoader baseClassLoader = curatedApplication.getBaseRuntimeClassLoader(); QuarkusClassLoader runtimeClassLoader; //so we have some differences between dev and test mode here. 
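        //note: the transformedClasses map built by extractTransformers (below) is keyed by class file
        //name and holds the transformed bytecode taken from TransformedClassesBuildItem, while classes
        //marked as eager are collected separately into eagerClasses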
//test mode only has a single class loader, while dev uses a disposable runtime class loader //that is discarded between restarts - if (curatedApplication.getQuarkusBootstrap().getMode() == QuarkusBootstrap.Mode.DEV) { - baseClassLoader.reset(extractGeneratedResources(false), bytecodeTransformers, transformerPredicates, - deploymentClassLoader); - runtimeClassLoader = curatedApplication.createRuntimeClassLoader(baseClassLoader, - bytecodeTransformers, transformerPredicates, - deploymentClassLoader, extractGeneratedResources(true)); - } else { - Map resources = new HashMap<>(); + Map resources = new HashMap<>(); + resources.putAll(extractGeneratedResources(true)); + if (curatedApplication.getQuarkusBootstrap().getMode() == QuarkusBootstrap.Mode.TEST) { resources.putAll(extractGeneratedResources(false)); - resources.putAll(extractGeneratedResources(true)); - baseClassLoader.reset(resources, bytecodeTransformers, transformerPredicates, deploymentClassLoader); + baseClassLoader.reset(resources, transformedClasses); runtimeClassLoader = baseClassLoader; + } else { + baseClassLoader.reset(extractGeneratedResources(false), + transformedClasses); + runtimeClassLoader = curatedApplication.createRuntimeClassLoader(baseClassLoader, + resources, transformedClasses); } this.runtimeClassLoader = runtimeClassLoader; - handleEagerClasses(runtimeClassLoader, eagerClasses); } private void handleEagerClasses(QuarkusClassLoader runtimeClassLoader, Set eagerClasses) { @@ -117,7 +106,7 @@ public void run() { /** * Runs the application by running the main method of the main class. As this is a blocking method a new * thread is created to run this task. - * + *
+     * <p>
* Before this method is called an appropriate exit handler will likely need to * be set in {@link io.quarkus.runtime.ApplicationLifecycleManager#setDefaultExitCodeHandler(Consumer)} * of the JVM will exit when the app stops. @@ -262,40 +251,18 @@ public ClassLoader getClassLoader() { return runtimeClassLoader; } - private Map>> extractTransformers(Set eagerClasses, - Map> transformerPredicates) { - Map>> bytecodeTransformers = new HashMap<>(); - Set noConstScanning = new HashSet<>(); - Map> constScanning = new HashMap<>(); - List transformers = buildResult.consumeMulti(BytecodeTransformerBuildItem.class); - for (BytecodeTransformerBuildItem i : transformers) { - List> list = bytecodeTransformers.get(i.getClassToTransform()); - if (list == null) { - bytecodeTransformers.put(i.getClassToTransform(), list = new ArrayList<>()); - } - list.add(i.getVisitorFunction()); - if (i.isEager()) { - eagerClasses.add(i.getClassToTransform()); - } - if (i.getRequireConstPoolEntry() == null || i.getRequireConstPoolEntry().isEmpty()) { - noConstScanning.add(i.getClassToTransform()); - } else { - constScanning.computeIfAbsent(i.getClassToTransform(), (s) -> new HashSet<>()) - .addAll(i.getRequireConstPoolEntry()); - } - } - for (String i : noConstScanning) { - constScanning.remove(i); - } - for (Map.Entry> entry : constScanning.entrySet()) { - transformerPredicates.put(entry.getKey(), new Predicate() { - @Override - public boolean test(byte[] bytes) { - return ConstPoolScanner.constPoolEntryPresent(bytes, entry.getValue()); + private Map extractTransformers(Set eagerClasses) { + Map ret = new HashMap<>(); + TransformedClassesBuildItem transformers = buildResult.consume(TransformedClassesBuildItem.class); + for (Set i : transformers.getTransformedClassesByJar().values()) { + for (TransformedClassesBuildItem.TransformedClass clazz : i) { + ret.put(clazz.getFileName(), clazz.getData()); + if (clazz.isEager()) { + eagerClasses.add(clazz.getClassName()); } - }); + } } - return bytecodeTransformers; + return ret; } private Map extractGeneratedResources(boolean applicationClasses) { diff --git a/extensions/smallrye-graphql/deployment/src/main/resources/META-INF/resources/favicon.ico b/core/deployment/src/main/resources/META-INF/branding/favicon.ico similarity index 100% rename from extensions/smallrye-graphql/deployment/src/main/resources/META-INF/resources/favicon.ico rename to core/deployment/src/main/resources/META-INF/branding/favicon.ico diff --git a/core/deployment/src/main/resources/META-INF/branding/logo.png b/core/deployment/src/main/resources/META-INF/branding/logo.png new file mode 100644 index 0000000000000..34608dbc02754 Binary files /dev/null and b/core/deployment/src/main/resources/META-INF/branding/logo.png differ diff --git a/core/deployment/src/main/resources/META-INF/branding/smallrye-graphql-ui-graphiql.css b/core/deployment/src/main/resources/META-INF/branding/smallrye-graphql-ui-graphiql.css new file mode 100644 index 0000000000000..7f2d9aa345b01 --- /dev/null +++ b/core/deployment/src/main/resources/META-INF/branding/smallrye-graphql-ui-graphiql.css @@ -0,0 +1,72 @@ +body { + height: 100%; + margin: 0 !important; + width: 100%; + overflow: hidden; + box-sizing: border-box; +} + +#graphiql { + height: 100vh; +} + +#graphiql .topBar{ + background:#343a40; +} + +#graphiql .execute-button { + box-shadow:unset; + background: #4695eb; +} + +#graphiql .toolbar-button { + padding-top: 12px !important; + box-shadow:unset; + background: transparent; + color: #9a9da0; +} + +#graphiql .topBar::after { + 
content: "{applicationName} {applicationVersion} (powered by Quarkus {quarkusVersion})"; + position: absolute; + padding-top: 6px; + right: 100px; + color: #9a9da0; +} + +#graphiql .docExplorerShow { + background: #4695eb; + color: white; +} + +#graphiql .docExplorerWrap { + color: black; +} +#graphiql .docExplorerHide{ + color: black; +} + +#graphiql .topBar { + padding-bottom: 12px !important; + padding-top: 12px !important; +} + +#graphiql .title a::after { + content: "GraphQL UI"; +} + +#graphiql .title a { + display: flex; + align-items:center; + color: white; + text-decoration: unset; + font-size: 1.25rem; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; +} + +#graphiql .title img { + border-right: 1px solid darkgrey; + padding-right: 10px; + margin-right: 10px; + height: 30px; +} \ No newline at end of file diff --git a/core/deployment/src/main/resources/META-INF/branding/smallrye-health-ui.css b/core/deployment/src/main/resources/META-INF/branding/smallrye-health-ui.css new file mode 100644 index 0000000000000..258a41795895f --- /dev/null +++ b/core/deployment/src/main/resources/META-INF/branding/smallrye-health-ui.css @@ -0,0 +1,36 @@ +.navbar { + padding: unset; + justify-content: left; +} + +.mr-auto, .mx-auto { + margin-right: unset !important; + padding-right: 15px; +} + +.navbar-brand img { + border-right: 1px solid darkgrey; + padding-left: 12px; + padding-right: 10px; + margin-right: 5px; + height: 30px; +} + +#state { + padding-right: 10px; + font-weight: normal; +} + +.nav-item h3 span { + font-size: 12px +} + +.navbar::after { + content: "{applicationName} {applicationVersion} (powered by Quarkus {quarkusVersion})"; + font-size: 0.9em; + padding-right: 10px; + padding-left: 10px; + position: absolute; + right: 0px; + color: #9a9da0; +} \ No newline at end of file diff --git a/core/deployment/src/main/resources/META-INF/branding/smallrye-open-api-ui.css b/core/deployment/src/main/resources/META-INF/branding/smallrye-open-api-ui.css new file mode 100644 index 0000000000000..f46c8e86c4d45 --- /dev/null +++ b/core/deployment/src/main/resources/META-INF/branding/smallrye-open-api-ui.css @@ -0,0 +1,82 @@ +html{ + box-sizing: border-box; + overflow: -moz-scrollbars-vertical; + overflow-y: scroll; +} + +#swagger-ui .topbar { + padding-top: 5px !important; + padding-left: 14px !important; + padding-bottom: 5px !important; +} + +#swagger-ui .swagger-ui .topbar a{ + max-width: 230px; +} + +#swagger-ui .topbar-wrapper .link a::after { + content: "Swagger UI"; + font-size: 1.25rem; + font-weight: normal; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; +} + +#swagger-ui .topbar-wrapper .download-url-wrapper::after { + content: "{applicationName} {applicationVersion} (powered by Quarkus {quarkusVersion})"; + color: #9a9da0; + padding-top: 12px; + position: absolute; + right: 15px; + font-size: 0.83em; +} + +#swagger-ui .topbar .wrapper { + max-width: unset !important; + padding: unset; + padding-top: 5px; + padding-bottom: 5px; +} + +#swagger-ui .download-url-wrapper { + justify-content: left; +} + +#swagger-ui .download-url-input { + width: 30%; + +} + +#swagger-ui .topbar .wrapper img { + border-right: 1px solid darkgrey; + padding-right: 10px; + margin-right: 10px; + height: 30px; +} + 
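+/* The {applicationName}, {applicationVersion} and {quarkusVersion} tokens used in the ::after rules
+   above are placeholders, apparently substituted with the application's own values when this branding
+   CSS is applied to the bundled UI rather than served as literal text. */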
+#swagger-ui .download-url-button { + background-color: #4695eb; + color: white; +} + +*, +*:before, +*:after +{ + box-sizing: inherit; +} + +body{ + margin:0; + background: #fafafa; +} + +.swagger-ui .topbar { + background-color: #343a40; +} + +#footer { + font-family:sans-serif; + color:#3b4151; + font-size:70%; + text-align: center; +} \ No newline at end of file diff --git a/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStepTest.java b/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStepTest.java new file mode 100644 index 0000000000000..4f4ce17d781a9 --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildStepTest.java @@ -0,0 +1,105 @@ +package io.quarkus.deployment.pkg.steps; + +import static io.quarkus.deployment.pkg.steps.NativeImageBuildStep.GraalVM.Distribution.MANDREL; +import static io.quarkus.deployment.pkg.steps.NativeImageBuildStep.GraalVM.Distribution.ORACLE; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.stream.Stream; + +import org.junit.jupiter.api.Test; + +import io.quarkus.deployment.pkg.steps.NativeImageBuildStep.GraalVM.Distribution; +import io.quarkus.deployment.pkg.steps.NativeImageBuildStep.GraalVM.Version; + +public class NativeImageBuildStepTest { + + @Test + public void testGraalVMVersionDetected() { + assertVersion(1, 0, ORACLE, Version.of(Stream.of("GraalVM Version 1.0.0"))); + assertVersion(19, 3, ORACLE, Version.of(Stream.of("GraalVM Version 19.3.0"))); + assertVersion(19, 3, ORACLE, Version.of(Stream.of("GraalVM Version 19.3.3"))); + assertVersion(20, 0, ORACLE, Version.of(Stream.of("GraalVM Version 20.0.0"))); + assertVersion(20, 1, ORACLE, Version.of(Stream.of("GraalVM Version 20.1.0 (Java Version 11.0.7)"))); + assertVersion(20, 1, MANDREL, Version + .of(Stream.of("GraalVM Version 20.1.0.1.Alpha2 56d4ee1b28 (Mandrel Distribution) (Java Version 11.0.8)"))); + assertVersion(20, 1, MANDREL, Version + .of(Stream.of("GraalVM Version 20.1.0.1-Final 56d4ee1b28 (Mandrel Distribution) (Java Version 11.0.8)"))); + assertVersion(21, 0, MANDREL, Version + .of(Stream.of("GraalVM Version 21.0.0.0-0b3 (Mandrel Distribution) (Java Version 11.0.8)"))); + assertVersion(20, 3, MANDREL, Version + .of(Stream.of("GraalVM Version 20.3.1.2-dev (Mandrel Distribution) (Java Version 11.0.8)"))); + } + + static void assertVersion(int major, int minor, Distribution distro, Version version) { + assertThat(version.major).isEqualTo(major); + assertThat(version.minor).isEqualTo(minor); + + assertThat(version.distribution).isEqualTo(distro); + if (distro == MANDREL) + assertThat(version.isMandrel()).isTrue(); + + assertThat(version.isDetected()).isEqualTo(true); + } + + @Test + public void testGraalVMVersionUndetected() { + assertThat(Version.of(Stream.of("foo bar")).isDetected()).isFalse(); + } + + @Test + public void testGraalVMVersionSnapshot() { + assertSnapshot(MANDREL, + Version.of(Stream.of("GraalVM Version beb2fd6 (Mandrel Distribution) (Java Version 11.0.9-internal)"))); + } + + static void assertSnapshot(Distribution distro, Version version) { + assertThat(version.distribution).isEqualTo(distro); + assertThat(version.isDetected()).isEqualTo(true); + assertThat(version.isSnapshot()).isEqualTo(true); + } + + @Test + public void testGraalVMVersionsOlderThan() { + assertOlderThan("GraalVM Version 1.0.0", "GraalVM Version 19.3.0"); + assertOlderThan("GraalVM Version 20.0.0", "GraalVM Version 20.1.0"); + } + + /** + * Asserts that version is older 
than other. + */ + static void assertOlderThan(String version, String other) { + assertThat(Version.of(Stream.of(version)).compareTo(Version.of(Stream.of(other)))).isLessThan(0); + } + + @Test + public void testGraalVMVersionsCompareEqualTo() { + assertCompareEqualTo("GraalVM Version 20.1.0", "GraalVM Version 20.1.0"); + // Distributions don't impact newer/older/same comparisons + assertCompareEqualTo("GraalVM Version 20.1.0", + "GraalVM Version 20.1.0.1.Alpha2 56d4ee1b28 (Mandrel Distribution) (Java Version 11.0.8)"); + // Micro versions ignored, hence two micros are considered the same right now + assertCompareEqualTo("GraalVM Version 19.3.0", "GraalVM Version 19.3.3"); + } + + /** + * Asserts that version is equals to other when compared. + */ + static void assertCompareEqualTo(String version, String other) { + assertThat(Version.of(Stream.of(version)).compareTo(Version.of(Stream.of(other)))).isEqualTo(0); + } + + @Test + public void testGraalVMVersionsNewerThan() { + assertNewerThan("GraalVM Version 20.0.0", "GraalVM Version 19.3.0"); + assertNewerThan("GraalVM Version 20.1.0.1.Alpha2 56d4ee1b28 (Mandrel Distribution) (Java Version 11.0.8)", + "GraalVM Version 20.0.0"); + } + + /** + * Asserts that version is newer than other. + */ + static void assertNewerThan(String version, String other) { + assertThat(Version.of(Stream.of(version)).isNewerThan(Version.of(Stream.of(other)))).isTrue(); + } + +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleClass.java b/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleClass.java index 2237ad2a2dad9..9b0ae262321fe 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleClass.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleClass.java @@ -7,7 +7,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleInvocationHandler.java b/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleInvocationHandler.java index a063a852eb188..906db4d57cc32 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleInvocationHandler.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/proxy/SimpleInvocationHandler.java @@ -7,7 +7,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/core/deployment/src/test/java/io/quarkus/deployment/recording/BytecodeRecorderTestCase.java b/core/deployment/src/test/java/io/quarkus/deployment/recording/BytecodeRecorderTestCase.java index c2a30db76293a..16fce39416240 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/recording/BytecodeRecorderTestCase.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/recording/BytecodeRecorderTestCase.java @@ -22,6 +22,7 @@ import java.util.function.Consumer; import java.util.function.Supplier; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import io.quarkus.deployment.TestClassLoader; @@ -136,6 +137,39 @@ public void testJavaBean() throws Exception { }, new TestJavaBeanSubclass("A string", 99, "PUT")); } + @Test + public void testValidationFails() throws Exception { + Assertions.assertThrows(RuntimeException.class, () -> { + runTest(generator -> { + TestRecorder recorder = generator.getRecordingProxy(TestRecorder.class); + ValidationFails validationFails = new ValidationFails(); + validationFails.setName("Stuart Douglas"); + recorder.object(validationFails); + }); + }); + } + + @Test + public void testRelaxedValidationSucceeds() throws Exception { + runTest(generator -> { + TestRecorder recorder = generator.getRecordingProxy(TestRecorder.class); + ValidationFails validationFails = new ValidationFails(); + validationFails.setName("Stuart Douglas"); + recorder.relaxedObject(validationFails); + }, new ValidationFails("Stuart Douglas")); + } + + @Test + public void testIgnoredProperties() throws Exception { + runTest(generator -> { + TestRecorder recorder = generator.getRecordingProxy(TestRecorder.class); + IgnoredProperties ignoredProperties = new IgnoredProperties(); + ignoredProperties.setNotIgnored("Shows up"); + ignoredProperties.setIgnoredField("Does not show up"); + recorder.ignoredProperties(ignoredProperties); + }, new IgnoredProperties("Shows up", null)); + } + @Test public void testJavaBeanWithEmbeddedReturnValue() throws Exception { runTest(generator -> { diff --git a/core/deployment/src/test/java/io/quarkus/deployment/recording/IgnoredProperties.java b/core/deployment/src/test/java/io/quarkus/deployment/recording/IgnoredProperties.java new file mode 100644 index 0000000000000..286ca1ebba4c0 --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/recording/IgnoredProperties.java @@ -0,0 +1,65 @@ +package io.quarkus.deployment.recording; + +import java.util.Objects; + +import io.quarkus.runtime.annotations.IgnoreProperty; + +public class IgnoredProperties { + + private String notIgnored; + @IgnoreProperty + private String ignoredField; + + public IgnoredProperties() { + } + + public IgnoredProperties(String notIgnored, String ignoredField) { + this.notIgnored = notIgnored; + this.ignoredField = ignoredField; + } + + public String getNotIgnored() { + return notIgnored; + } + + public void setNotIgnored(String notIgnored) { + this.notIgnored = notIgnored; + } + + public String getIgnoredField() { + return ignoredField; + } + + public void setIgnoredField(String ignoredField) { + this.ignoredField = ignoredField; + } + + @IgnoreProperty + public String getSomethingElse() { + throw new IllegalStateException("This should not have been 
called"); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + IgnoredProperties that = (IgnoredProperties) o; + return Objects.equals(notIgnored, that.notIgnored) && + Objects.equals(ignoredField, that.ignoredField); + } + + @Override + public int hashCode() { + return Objects.hash(notIgnored, ignoredField); + } + + @Override + public String toString() { + return "IgnoredProperties{" + + "notIgnored='" + notIgnored + '\'' + + ", ignoredField='" + ignoredField + '\'' + + '}'; + } +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/recording/TestRecorder.java b/core/deployment/src/test/java/io/quarkus/deployment/recording/TestRecorder.java index 7ff855e393a42..2fb594c4628d9 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/recording/TestRecorder.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/recording/TestRecorder.java @@ -7,6 +7,7 @@ import java.util.function.Supplier; import io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.annotations.RelaxedValidation; public class TestRecorder { @@ -90,4 +91,12 @@ public String get() { } }; } + + public void relaxedObject(@RelaxedValidation ValidationFails validationFails) { + RESULT.add(validationFails); + } + + public void ignoredProperties(IgnoredProperties ignoredProperties) { + RESULT.add(ignoredProperties); + } } diff --git a/core/deployment/src/test/java/io/quarkus/deployment/recording/ValidationFails.java b/core/deployment/src/test/java/io/quarkus/deployment/recording/ValidationFails.java new file mode 100644 index 0000000000000..b94ad2ff29125 --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/recording/ValidationFails.java @@ -0,0 +1,47 @@ +package io.quarkus.deployment.recording; + +import java.util.Objects; + +public class ValidationFails { + + private String name; + private boolean nameValid; + + public ValidationFails() { + } + + public ValidationFails(String name) { + setName(name); + } + + public String getName() { + return name; + } + + public ValidationFails setName(String name) { + this.name = name; + this.nameValid = name.contains(" "); + return this; + } + + public boolean isNameValid() { + return nameValid; + } + + @Override + public boolean equals(Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + ValidationFails that = (ValidationFails) o; + return nameValid == that.nameValid && + Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name, nameValid); + } +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/BasicExecutableOutputOutcomeTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/BasicExecutableOutputOutcomeTest.java index 3d8b50773050a..194cd875253eb 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/BasicExecutableOutputOutcomeTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/BasicExecutableOutputOutcomeTest.java @@ -37,6 +37,8 @@ protected TsArtifact modelApp() { addToExpectedLib(directRtDep); final TsArtifact appJar = TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) .addDependency(ext1) .addDependency(ext2) .addDependency(compileDep) diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyAddedInProfileTest.java 
b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyAddedInProfileTest.java index 95e385c2865c0..45f08f86d2ce9 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyAddedInProfileTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyAddedInProfileTest.java @@ -22,6 +22,8 @@ protected TsArtifact modelApp() { addToExpectedLib(extB_100_rt); final TsArtifact appJar = TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) .addDependency(extA_100); final Profile profile = new Profile(); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyVersionOverridesManagedVersionTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyVersionOverridesManagedVersionTest.java index 5203b7e258c55..d207c2669ff5a 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyVersionOverridesManagedVersionTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DependencyVersionOverridesManagedVersionTest.java @@ -18,6 +18,8 @@ protected TsArtifact modelApp() { addToExpectedLib(extB_100_rt); final TsArtifact bom = new TsArtifact("test.quarkus", "test-bom", null, "pom", "1.0.0"); + bom.addManagedDependency(platformDescriptor()); + bom.addManagedDependency(platformProperties()); bom.addManagedDependency(new TsDependency(extA_100.getRuntime())); bom.addManagedDependency(new TsDependency(extB_100_rt)); install(bom); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DirectUserDepsOverrideTransitiveExtDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DirectUserDepsOverrideTransitiveExtDepsTest.java index 1fb848e9c38ec..9c22716339009 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DirectUserDepsOverrideTransitiveExtDepsTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/DirectUserDepsOverrideTransitiveExtDepsTest.java @@ -24,6 +24,8 @@ protected TsArtifact modelApp() { addToExpectedLib(common3); final TsArtifact appJar = TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) .addDependency(ext1) .addDependency(common3) .addDependency(ext2); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExcludeExtensionDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExcludeExtensionDepsTest.java new file mode 100644 index 0000000000000..980bf49c1282d --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExcludeExtensionDepsTest.java @@ -0,0 +1,88 @@ +package io.quarkus.deployment.runnerjar; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.HashSet; +import java.util.Set; + +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppDependency; +import io.quarkus.bootstrap.model.AppModel; +import io.quarkus.bootstrap.resolver.TsArtifact; +import io.quarkus.bootstrap.resolver.TsDependency; +import io.quarkus.bootstrap.resolver.TsQuarkusExt; + +/** + * The test allowing to make sure that if we exclude an extension from another extension, the + * exclusion will be also applied to the deployment dependencies. 
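+ * For example, below {@code ext-b} is added to the application with {@code ext-a} and the
+ * {@code ext-dep-exclude-*} artifacts excluded, and the assertions then check that neither the runtime
+ * dependency set nor the {@code ext-b-deployment} graph contains any of the excluded artifacts.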
+ */ +public class ExcludeExtensionDepsTest extends ExecutableOutputOutcomeTestBase { + + @Override + protected TsArtifact modelApp() { + + final TsArtifact extADep1 = TsArtifact.jar("ext-a-dep-1"); + final TsArtifact extADep2 = TsArtifact.jar("ext-a-dep-2"); + final TsArtifact extBDep1 = TsArtifact.jar("ext-b-dep-1"); + addToExpectedLib(extBDep1); + final TsArtifact extBDep2 = TsArtifact.jar("ext-b-dep-2"); + addToExpectedLib(extBDep2); + final TsArtifact extBDepTrans1 = TsArtifact.jar("ext-b-dep-trans-1"); + addToExpectedLib(extBDepTrans1); + final TsArtifact extBDepTrans2 = TsArtifact.jar("ext-b-dep-trans-2"); + addToExpectedLib(extBDepTrans2); + final TsArtifact depToExclude1 = TsArtifact.jar("ext-dep-exclude-1"); + final TsArtifact depToExclude2 = TsArtifact.jar("ext-dep-exclude-2"); + final TsArtifact depToExclude3 = TsArtifact.jar("ext-dep-exclude-3"); + final TsArtifact depToExclude4 = TsArtifact.jar("ext-dep-exclude-4"); + final TsArtifact depToExclude5 = TsArtifact.jar("ext-dep-exclude-5"); + final TsArtifact depToExclude6 = TsArtifact.jar("ext-dep-exclude-6"); + extBDepTrans2.addDependency(new TsDependency(extBDep2)); + extBDepTrans2.addDependency(new TsDependency(depToExclude6)); + extBDepTrans1.addDependency(new TsDependency(extBDepTrans2)); + extBDepTrans1.addDependency(new TsDependency(depToExclude2)); + extBDepTrans1.addDependency(new TsDependency(depToExclude5)); + extBDep1.addDependency(extBDepTrans1, depToExclude5); + extBDep1.addDependency(new TsDependency(depToExclude1)); + + final TsQuarkusExt extA = new TsQuarkusExt("ext-a"); + extA.getRuntime() + .addDependency(extADep1); + extA.getDeployment() + .addDependency(extADep2); + final TsQuarkusExt extB = new TsQuarkusExt("ext-b"); + addToExpectedLib(extB.getRuntime()); + extB.getRuntime() + .addDependency(extBDep1, depToExclude6) + .addDependency(extA) + .addDependency(depToExclude4); + extB.getDeployment() + .addDependency(depToExclude3); + + return TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) + .addDependency(extB, extA.getRuntime(), depToExclude1, depToExclude2, depToExclude3, depToExclude4); + } + + @Override + protected void assertAppModel(AppModel appModel) throws Exception { + final Set expectedDeployDeps = new HashSet<>(); + expectedDeployDeps + .add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-deployment", "1"), "compile")); + assertEquals(expectedDeployDeps, new HashSet<>(appModel.getDeploymentDependencies())); + final Set expectedRuntimeDeps = new HashSet<>(); + expectedRuntimeDeps.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b", "1"), "compile")); + expectedRuntimeDeps.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-dep-1", "1"), "compile")); + expectedRuntimeDeps.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-dep-2", "1"), "compile")); + expectedRuntimeDeps + .add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-dep-trans-1", "1"), "compile")); + expectedRuntimeDeps + .add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-dep-trans-2", "1"), "compile")); + assertEquals(expectedRuntimeDeps, new HashSet<>(appModel.getUserDependencies())); + final Set expectedFullDeps = new HashSet<>(); + expectedFullDeps.addAll(expectedDeployDeps); + expectedFullDeps.addAll(expectedRuntimeDeps); + assertEquals(expectedFullDeps, new HashSet<>(appModel.getFullDeploymentDeps())); + } +} diff --git 
a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExcludeLibDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExcludeLibDepsTest.java new file mode 100644 index 0000000000000..a660f1951ecfd --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExcludeLibDepsTest.java @@ -0,0 +1,80 @@ +package io.quarkus.deployment.runnerjar; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.HashSet; +import java.util.Set; + +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppDependency; +import io.quarkus.bootstrap.model.AppModel; +import io.quarkus.bootstrap.resolver.TsArtifact; +import io.quarkus.bootstrap.resolver.TsDependency; +import io.quarkus.bootstrap.resolver.TsQuarkusExt; + +/** + * The test allowing to make sure that if we exclude a library from an extension, the + * exclusion will be also applied to the deployment dependencies. + */ +public class ExcludeLibDepsTest extends ExecutableOutputOutcomeTestBase { + + @Override + protected TsArtifact modelApp() { + + final TsArtifact extADep1 = TsArtifact.jar("ext-a-dep-1"); + addToExpectedLib(extADep1); + final TsArtifact extADep2 = TsArtifact.jar("ext-a-dep-2"); + addToExpectedLib(extADep2); + final TsArtifact extADepTrans1 = TsArtifact.jar("ext-a-dep-trans-1"); + addToExpectedLib(extADepTrans1); + final TsArtifact extADepTrans2 = TsArtifact.jar("ext-a-dep-trans-2"); + addToExpectedLib(extADepTrans2); + final TsArtifact depToExclude1 = TsArtifact.jar("ext-dep-exclude-1"); + final TsArtifact depToExclude2 = TsArtifact.jar("ext-dep-exclude-2"); + final TsArtifact depToExclude3 = TsArtifact.jar("ext-dep-exclude-3"); + final TsArtifact depToExclude4 = TsArtifact.jar("ext-dep-exclude-4"); + final TsArtifact depToExclude5 = TsArtifact.jar("ext-dep-exclude-5"); + final TsArtifact depToExclude6 = TsArtifact.jar("ext-dep-exclude-6"); + extADepTrans2.addDependency(new TsDependency(extADep2)); + extADepTrans2.addDependency(new TsDependency(depToExclude6)); + extADepTrans1.addDependency(new TsDependency(extADepTrans2)); + extADepTrans1.addDependency(new TsDependency(depToExclude2)); + extADepTrans1.addDependency(new TsDependency(depToExclude5)); + extADep1.addDependency(extADepTrans1, depToExclude5); + extADep1.addDependency(new TsDependency(depToExclude1)); + + final TsQuarkusExt extA = new TsQuarkusExt("ext-a"); + addToExpectedLib(extA.getRuntime()); + extA.getRuntime() + .addDependency(extADep1, depToExclude6) + .addDependency(depToExclude3); + extA.getDeployment() + .addDependency(depToExclude4); + + return TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) + .addDependency(extA, depToExclude1, depToExclude2, depToExclude3, depToExclude4); + } + + @Override + protected void assertAppModel(AppModel appModel) throws Exception { + final Set expectedDeployDeps = new HashSet<>(); + expectedDeployDeps + .add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-deployment", "1"), "compile")); + assertEquals(expectedDeployDeps, new HashSet<>(appModel.getDeploymentDependencies())); + final Set expectedRuntimeDeps = new HashSet<>(); + expectedRuntimeDeps.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a", "1"), "compile")); + expectedRuntimeDeps.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-dep-1", "1"), "compile")); + expectedRuntimeDeps.add(new AppDependency(new 
AppArtifact("io.quarkus.bootstrap.test", "ext-a-dep-2", "1"), "compile")); + expectedRuntimeDeps + .add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-dep-trans-1", "1"), "compile")); + expectedRuntimeDeps + .add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-dep-trans-2", "1"), "compile")); + assertEquals(expectedRuntimeDeps, new HashSet<>(appModel.getUserDependencies())); + final Set expectedFullDeps = new HashSet<>(); + expectedFullDeps.addAll(expectedDeployDeps); + expectedFullDeps.addAll(expectedRuntimeDeps); + assertEquals(expectedFullDeps, new HashSet<>(appModel.getFullDeploymentDeps())); + } +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExecutableOutputOutcomeTestBase.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExecutableOutputOutcomeTestBase.java index 1bc418dffad99..c6cf3c650c7b3 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExecutableOutputOutcomeTestBase.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ExecutableOutputOutcomeTestBase.java @@ -5,6 +5,9 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import java.io.BufferedWriter; +import java.io.IOException; +import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -13,16 +16,21 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Properties; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.JarFile; import java.util.stream.Stream; +import io.quarkus.bootstrap.BootstrapConstants; import io.quarkus.bootstrap.app.AugmentAction; import io.quarkus.bootstrap.app.AugmentResult; import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.model.AppModel; import io.quarkus.bootstrap.resolver.TsArtifact; +import io.quarkus.bootstrap.resolver.TsArtifact.ContentProvider; +import io.quarkus.bootstrap.resolver.TsDependency; import io.quarkus.bootstrap.resolver.update.CreatorOutcomeTestBase; public abstract class ExecutableOutputOutcomeTestBase extends CreatorOutcomeTestBase { @@ -31,16 +39,74 @@ public abstract class ExecutableOutputOutcomeTestBase extends CreatorOutcomeTest private static final String MAIN_CLS = "io.quarkus.runner.GeneratedMain"; protected List expectedLib = new ArrayList<>(); + protected TsDependency platformDescriptor; + protected TsDependency platformPropsDep; protected void addToExpectedLib(TsArtifact entry) { expectedLib.add(entry.getGroupId() + '.' + entry.getArtifactId() + '-' + entry.getVersion() + '.' 
+ entry.getType()); } + protected void assertAppModel(AppModel appModel) throws Exception { + } + + protected TsDependency platformDescriptor() { + if (platformDescriptor == null) { + TsArtifact platformDescr = new TsArtifact("org.acme", + "acme" + BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX, "1.0", "json", + "1.0"); + platformDescr.setContent(new ContentProvider() { + Path platformJson; + + @Override + public Path getPath(Path workDir) throws IOException { + if (platformJson == null) { + platformJson = workDir.resolve("platform-descriptor.json"); + try (BufferedWriter writer = Files.newBufferedWriter(platformJson)) { + writer.write("platform descriptor"); + } + } + return platformJson; + } + }); + platformDescr.install(repo); + platformDescriptor = new TsDependency(platformDescr); + } + return platformDescriptor; + } + + protected TsDependency platformProperties() { + if (platformPropsDep == null) { + TsArtifact platformProps = new TsArtifact("org.acme", + "acme" + BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX, null, "properties", + "1.0"); + platformProps.setContent(new ContentProvider() { + Path propsFile; + + @Override + public Path getPath(Path workDir) throws IOException { + if (propsFile == null) { + Properties props = new Properties(); + props.setProperty("platform.quarkus.native.builder-image", "builder-image-url"); + propsFile = workDir.resolve("platform-properties.properties"); + try (OutputStream os = Files.newOutputStream(propsFile)) { + props.store(os, "Test Quarkus platform properties"); + } + } + return propsFile; + } + }); + platformProps.install(repo); + platformPropsDep = new TsDependency(platformProps); + } + return platformPropsDep; + } + @Override protected void testCreator(QuarkusBootstrap creator) throws Exception { - System.setProperty("quarkus.package.type", "legacy"); + System.setProperty("quarkus.package.type", "legacy-jar"); try { CuratedApplication curated = creator.bootstrap(); + assertAppModel(curated.getAppModel()); AugmentAction action = curated.createAugmentor(); AugmentResult outcome = action.createProductionApplication(); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/FirstTransitiveDepVersionIsTheEffectiveOneTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/FirstTransitiveDepVersionIsTheEffectiveOneTest.java index 7e99bb00a5699..cb3b0ea933964 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/FirstTransitiveDepVersionIsTheEffectiveOneTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/FirstTransitiveDepVersionIsTheEffectiveOneTest.java @@ -27,6 +27,8 @@ protected TsArtifact modelApp() { addToExpectedLib(directAppDep); final TsArtifact appJar = TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) .addDependency(ext1) .addDependency(directAppDep) .addDependency(ext2); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/OptionalDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/OptionalDepsTest.java new file mode 100644 index 0000000000000..091a0118a0e17 --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/OptionalDepsTest.java @@ -0,0 +1,79 @@ +package io.quarkus.deployment.runnerjar; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.HashSet; +import java.util.Set; + +import io.quarkus.bootstrap.model.AppArtifact; +import 
io.quarkus.bootstrap.model.AppDependency; +import io.quarkus.bootstrap.model.AppModel; +import io.quarkus.bootstrap.resolver.TsArtifact; +import io.quarkus.bootstrap.resolver.TsDependency; +import io.quarkus.bootstrap.resolver.TsQuarkusExt; + +public class OptionalDepsTest extends ExecutableOutputOutcomeTestBase { + + @Override + protected TsArtifact modelApp() { + + final TsArtifact extADep = TsArtifact.jar("ext-a-dep"); + addToExpectedLib(extADep); + + final TsArtifact extAOptionalDep = TsArtifact.jar("ext-a-optional-dep"); + final TsArtifact extAOptionalDeploymentDep = TsArtifact.jar("ext-a-optional-deployment-dep"); + + final TsQuarkusExt extA = new TsQuarkusExt("ext-a"); + addToExpectedLib(extA.getRuntime()); + extA.getRuntime() + .addDependency(extADep) + .addDependency(new TsDependency(extAOptionalDep, true)); + extA.getDeployment() + .addDependency(new TsDependency(extAOptionalDeploymentDep, true)); + + final TsArtifact extBOptionalDep = TsArtifact.jar("ext-b-optional-dep"); + final TsArtifact extBDeploymentDep = TsArtifact.jar("ext-b-deployment-dep"); + install(extBDeploymentDep); + final TsArtifact extBOptionalDeploymentDep = TsArtifact.jar("ext-b-optional-deployment-dep"); + install(extBOptionalDeploymentDep); + + final TsQuarkusExt extB = new TsQuarkusExt("ext-b"); + extB.getRuntime().addDependency(new TsDependency(extBOptionalDep, true)); + addToExpectedLib(extB.getRuntime()); + extB.getDeployment().addDependency(new TsDependency(extBOptionalDeploymentDep, true)); + extB.getDeployment().addDependency(new TsDependency(extBDeploymentDep, false)); + install(extB); + + final TsArtifact appOptionalDep = TsArtifact.jar("app-optional-dep") + .addDependency(extB.getRuntime()); + addToExpectedLib(appOptionalDep); + + final TsQuarkusExt extC = new TsQuarkusExt("ext-c"); + install(extC); + + final TsQuarkusExt extD = new TsQuarkusExt("ext-d"); + extD.getRuntime().addDependency(new TsDependency(extC.getRuntime(), true)); + extD.getDeployment().addDependency(new TsDependency(extC.getDeployment(), true)); + install(extD); + addToExpectedLib(extD.getRuntime()); + + return TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) + .addDependency(extA, true) + .addDependency(new TsDependency(appOptionalDep, true)) + .addDependency(extD.getRuntime()); + } + + @Override + protected void assertAppModel(AppModel appModel) throws Exception { + final Set expected = new HashSet<>(); + expected.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-deployment", "1"), "compile", true)); + expected.add( + new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-deployment-dep", "1"), "compile", true)); + expected.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-b-deployment", "1"), "compile", true)); + expected.add( + new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-d-deployment", "1"), "compile", false)); + assertEquals(expected, new HashSet<>(appModel.getDeploymentDependencies())); + } +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java new file mode 100644 index 0000000000000..fd05600da9303 --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java @@ -0,0 +1,50 @@ +package io.quarkus.deployment.runnerjar; + +import static org.junit.jupiter.api.Assertions.assertEquals; 
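+// What follows verifies that dependencies an extension declares with "provided" scope (both runtime
+// and deployment ones) are left out of the resolved application model; only the compile-scoped
+// deployment dependencies are expected to remain.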
+ +import java.util.HashSet; +import java.util.Set; + +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppDependency; +import io.quarkus.bootstrap.model.AppModel; +import io.quarkus.bootstrap.resolver.TsArtifact; +import io.quarkus.bootstrap.resolver.TsDependency; +import io.quarkus.bootstrap.resolver.TsQuarkusExt; + +public class ProvidedExtensionDepsTest extends ExecutableOutputOutcomeTestBase { + + @Override + protected TsArtifact modelApp() { + + final TsArtifact extADep = TsArtifact.jar("ext-a-dep"); + addToExpectedLib(extADep); + + final TsArtifact extAProvidedDep = TsArtifact.jar("ext-a-provided-dep"); + + final TsArtifact extADeploymentDep = TsArtifact.jar("ext-a-deployment-dep"); + final TsArtifact extAOptionalDeploymentDep = TsArtifact.jar("ext-a-provided-deployment-dep"); + + final TsQuarkusExt extA = new TsQuarkusExt("ext-a"); + addToExpectedLib(extA.getRuntime()); + extA.getRuntime() + .addDependency(new TsDependency(extADep)) + .addDependency(new TsDependency(extAProvidedDep, "provided")); + extA.getDeployment() + .addDependency(new TsDependency(extADeploymentDep)) + .addDependency(new TsDependency(extAOptionalDeploymentDep, "provided")); + + return TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) + .addDependency(extA); + } + + @Override + protected void assertAppModel(AppModel appModel) throws Exception { + final Set expected = new HashSet<>(); + expected.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-deployment", "1"), "compile")); + expected.add(new AppDependency(new AppArtifact("io.quarkus.bootstrap.test", "ext-a-deployment-dep", "1"), "compile")); + assertEquals(expected, new HashSet<>(appModel.getDeploymentDependencies())); + } +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/SimpleExtAndAppCompileDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/SimpleExtAndAppCompileDepsTest.java index 78cae93c9f75f..4cea5413c8c21 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/SimpleExtAndAppCompileDepsTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/SimpleExtAndAppCompileDepsTest.java @@ -49,6 +49,8 @@ protected TsArtifact modelApp() { addToExpectedLib(appDirectDep); final TsArtifact appJar = TsArtifact.jar("app") + .addManagedDependency(platformDescriptor()) + .addManagedDependency(platformProperties()) .addDependency(ext1) .addDependency(ext2) .addDependency(appDirectDep); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java b/core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java index a892c2c7c0e20..d7dd632f46ed7 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java @@ -305,23 +305,23 @@ private static Index index(Class... classes) { private void checkRepoArg(Index index, Class baseClass, Class soughtClass, Class expectedArg) { List args = JandexUtil.resolveTypeParameters(name(baseClass), name(soughtClass), index); - assertThat(args).extracting("name").containsOnly(name(expectedArg)); + assertThat(args).extracting(Type::name).containsOnly(name(expectedArg)); } private void checkRepoArg(Index index, Class baseClass, Class soughtClass, Class... 
expectedArgs) { List args = JandexUtil.resolveTypeParameters(name(baseClass), name(soughtClass), index); - Object[] expectedArgNames = new Object[expectedArgs.length]; + DotName[] expectedArgNames = new DotName[expectedArgs.length]; for (int i = 0; i < expectedArgs.length; i++) { expectedArgNames[i] = name(expectedArgs[i]); } - assertThat(args).extracting("name").containsOnly(expectedArgNames); + assertThat(args).extracting(Type::name).containsOnly(expectedArgNames); } private void checkRepoArg(Index index, Class baseClass, Class soughtClass, String expectedArg) { List args = JandexUtil.resolveTypeParameters(name(baseClass), name(soughtClass), index); - assertThat(args).hasOnlyOneElementSatisfying(t -> { + assertThat(args).singleElement().satisfies(t -> { assertThat(t.toString()).isEqualTo(expectedArg); }); } diff --git a/core/deployment/src/test/java/io/quarkus/deployment/util/UriNormalizationTest.java b/core/deployment/src/test/java/io/quarkus/deployment/util/UriNormalizationTest.java new file mode 100644 index 0000000000000..6cc52d80e2c4e --- /dev/null +++ b/core/deployment/src/test/java/io/quarkus/deployment/util/UriNormalizationTest.java @@ -0,0 +1,62 @@ +package io.quarkus.deployment.util; + +import java.net.URI; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class UriNormalizationTest { + @Test + void testToUri() { + Assertions.assertEquals("/", UriNormalizationUtil.toURI("/", true).getPath()); + Assertions.assertEquals("", UriNormalizationUtil.toURI("/", false).getPath()); + + Assertions.assertEquals("/", UriNormalizationUtil.toURI("./", true).getPath()); + Assertions.assertEquals("", UriNormalizationUtil.toURI("./", false).getPath()); + + Assertions.assertEquals("bob/", UriNormalizationUtil.toURI("bob/", true).getPath()); + Assertions.assertEquals("bob", UriNormalizationUtil.toURI("bob/", false).getPath()); + } + + @Test + void testExamples() { + URI root = UriNormalizationUtil.toURI("/", true); + URI prefix = UriNormalizationUtil.toURI("/prefix", true); + URI foo = UriNormalizationUtil.toURI("foo", true); + + Assertions.assertEquals("/example/", UriNormalizationUtil.normalizeWithBase(root, "example", true).getPath()); + Assertions.assertEquals("/example", UriNormalizationUtil.normalizeWithBase(root, "example", false).getPath()); + Assertions.assertEquals("/example/", UriNormalizationUtil.normalizeWithBase(root, "/example", true).getPath()); + Assertions.assertEquals("/example", UriNormalizationUtil.normalizeWithBase(root, "/example", false).getPath()); + + Assertions.assertEquals("/prefix/example/", UriNormalizationUtil.normalizeWithBase(prefix, "example", true).getPath()); + Assertions.assertEquals("/prefix/example", UriNormalizationUtil.normalizeWithBase(prefix, "example", false).getPath()); + Assertions.assertEquals("/example/", UriNormalizationUtil.normalizeWithBase(prefix, "/example", true).getPath()); + Assertions.assertEquals("/example", UriNormalizationUtil.normalizeWithBase(prefix, "/example", false).getPath()); + + Assertions.assertEquals("foo/example/", UriNormalizationUtil.normalizeWithBase(foo, "example", true).getPath()); + Assertions.assertEquals("foo/example", UriNormalizationUtil.normalizeWithBase(foo, "example", false).getPath()); + Assertions.assertEquals("/example/", UriNormalizationUtil.normalizeWithBase(foo, "/example", true).getPath()); + Assertions.assertEquals("/example", UriNormalizationUtil.normalizeWithBase(foo, "/example", false).getPath()); + } + + @Test + void testDubiousUriPaths() { + URI root = 
UriNormalizationUtil.toURI("/", true); + + Assertions.assertEquals("/", UriNormalizationUtil.normalizeWithBase(root, "#example", false).getPath()); + Assertions.assertEquals("/", UriNormalizationUtil.normalizeWithBase(root, "?example", false).getPath()); + + Assertions.assertEquals("/example", UriNormalizationUtil.normalizeWithBase(root, "./example", false).getPath()); + Assertions.assertEquals("/example", UriNormalizationUtil.normalizeWithBase(root, "//example", false).getPath()); + + Assertions.assertThrows(IllegalArgumentException.class, + () -> UriNormalizationUtil.normalizeWithBase(root, "/%2fexample", false)); + Assertions.assertThrows(IllegalArgumentException.class, + () -> UriNormalizationUtil.normalizeWithBase(root, "junk/../example", false)); + Assertions.assertThrows(IllegalArgumentException.class, + () -> UriNormalizationUtil.normalizeWithBase(root, "junk/../../example", false)); + Assertions.assertThrows(IllegalArgumentException.class, + () -> UriNormalizationUtil.normalizeWithBase(root, "../example", false)); + } +} diff --git a/core/deployment/src/test/resources/META-INF/services/test.javax.servlet.ServletContainerInitializer b/core/deployment/src/test/resources/META-INF/services/test.javax.servlet.ServletContainerInitializer index aa489494e591f..653bff528651c 100644 --- a/core/deployment/src/test/resources/META-INF/services/test.javax.servlet.ServletContainerInitializer +++ b/core/deployment/src/test/resources/META-INF/services/test.javax.servlet.ServletContainerInitializer @@ -13,7 +13,7 @@ org.apache.logging.log4j.core.Appender # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/core/devmode-spi/src/main/java/io/quarkus/dev/ErrorPageGenerators.java b/core/devmode-spi/src/main/java/io/quarkus/dev/ErrorPageGenerators.java new file mode 100644 index 0000000000000..cce84323bee02 --- /dev/null +++ b/core/devmode-spi/src/main/java/io/quarkus/dev/ErrorPageGenerators.java @@ -0,0 +1,39 @@ +package io.quarkus.dev; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Function; + +/** + * The generators can be used to generate a custom HTML page for a specific deployment exception that occurs during the + * development mode. + *
<p>
+ * In order to avoid classloading issues the generators should not access the root cause directly but use reflection instead + * (the exception class could be loaded by a different class loader). + */ +public class ErrorPageGenerators { + + private static final Map> generators = new ConcurrentHashMap<>(); + + /** + * Register a function that will be used to generate the error page for the given root cause. + * + * @param rootCauseClassName + * @param function + */ + public static void register(String rootCauseClassName, Function function) { + if (generators.putIfAbsent(rootCauseClassName, function) != null) { + throw new IllegalStateException("Template builder already specified for: " + rootCauseClassName); + } + } + + public static Function get(String rootCauseClassName) { + return generators.get(rootCauseClassName); + } + + // This method is called by a relevant service provider during HotReplacementSetup#close() + public static void clear() { + generators.clear(); + } + +} diff --git a/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleManager.java b/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleManager.java new file mode 100644 index 0000000000000..8495572d37f15 --- /dev/null +++ b/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleManager.java @@ -0,0 +1,82 @@ +package io.quarkus.dev.console; + +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; + +import io.quarkus.dev.spi.HotReplacementContext; + +public class DevConsoleManager { + + private static volatile Consumer handler; + private static volatile Map> templateInfo; + private static volatile HotReplacementContext hotReplacementContext; + private static volatile Object quarkusBootstrap; + /** + * Global map that can be used to share data betweeen the runtime and deployment side + * to enable communication. + *
<p>
+ * Key names should be namespaced. + *
<p>
+ * As the class loaders are different these objects will generally need to implement some kind of common interface + */ + private static Map globals = new ConcurrentHashMap<>(); + + public static void registerHandler(Consumer requestHandler) { + handler = requestHandler; + } + + public static void sentRequest(DevConsoleRequest request) { + Consumer handler = DevConsoleManager.handler; + if (handler == null) { + request.getResponse().complete(new DevConsoleResponse(503, Collections.emptyMap(), new byte[0])); //service unavailable + } else { + handler.accept(request); + } + } + + public static Map> getTemplateInfo() { + return templateInfo; + } + + public static void setTemplateInfo(Map> templateInfo) { + DevConsoleManager.templateInfo = templateInfo; + } + + public static HotReplacementContext getHotReplacementContext() { + return hotReplacementContext; + } + + public static void setHotReplacementContext(HotReplacementContext hotReplacementContext) { + DevConsoleManager.hotReplacementContext = hotReplacementContext; + } + + public static void setQuarkusBootstrap(Object qb) { + quarkusBootstrap = qb; + } + + public static Object getQuarkusBootstrap() { + return quarkusBootstrap; + } + + /** + * Sets a global that is shared between both the runtime and deployment parts + * + * @param name A namespaced key + * @param value A value + */ + public static void setGlobal(String name, Object value) { + globals.put(name, value); + } + + /** + * Gets a shared global + * + * @param name The key + * @return The value + */ + public static T getGlobal(String name) { + return (T) globals.get(name); + } +} diff --git a/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleRequest.java b/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleRequest.java new file mode 100644 index 0000000000000..c16d791c5884a --- /dev/null +++ b/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleRequest.java @@ -0,0 +1,42 @@ +package io.quarkus.dev.console; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +public class DevConsoleRequest { + + private final String method; + private final String uri; + private final Map> headers; + private final byte[] body; + private final CompletableFuture response = new CompletableFuture<>(); + + public DevConsoleRequest(String method, String uri, Map> headers, byte[] body) { + this.method = method; + this.uri = uri; + this.headers = headers; + this.body = body; + } + + public String getMethod() { + return method; + } + + public Map> getHeaders() { + return headers; + } + + public byte[] getBody() { + return body; + } + + public String getUri() { + return uri; + } + + public CompletableFuture getResponse() { + return response; + } + +} diff --git a/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleResponse.java b/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleResponse.java new file mode 100644 index 0000000000000..17fa07a8499f9 --- /dev/null +++ b/core/devmode-spi/src/main/java/io/quarkus/dev/console/DevConsoleResponse.java @@ -0,0 +1,51 @@ +package io.quarkus.dev.console; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DevConsoleResponse { + + private int status; + private Map> headers = new HashMap<>(); + private byte[] body; + + public DevConsoleResponse(int status, Map> headers, byte[] body) { + this.status = status; + this.headers = headers; + this.body = body; + } + + public DevConsoleResponse() { + this.status = 200; + 
this.headers = new HashMap<>(); + this.body = new byte[0]; + } + + public int getStatus() { + return status; + } + + public Map> getHeaders() { + return headers; + } + + public byte[] getBody() { + return body; + } + + public DevConsoleResponse setStatus(int status) { + this.status = status; + return this; + } + + public DevConsoleResponse setHeaders(Map> headers) { + this.headers = headers; + return this; + } + + public DevConsoleResponse setBody(byte[] body) { + this.body = body; + return this; + } +} diff --git a/core/devmode-spi/src/main/java/io/quarkus/dev/console/TempSystemProperties.java b/core/devmode-spi/src/main/java/io/quarkus/dev/console/TempSystemProperties.java new file mode 100644 index 0000000000000..0c31a909ca390 --- /dev/null +++ b/core/devmode-spi/src/main/java/io/quarkus/dev/console/TempSystemProperties.java @@ -0,0 +1,27 @@ +package io.quarkus.dev.console; + +import java.util.HashMap; +import java.util.Map; + +/** + * TODO: this is a hack, we should be able to pass config overrides into the bootstrap + */ +public class TempSystemProperties implements AutoCloseable { + final Map old = new HashMap<>(); + + public void set(String key, String value) { + old.put(key, System.getProperty(key)); + System.setProperty(key, value); + } + + @Override + public void close() { + for (Map.Entry e : old.entrySet()) { + if (e.getValue() == null) { + System.clearProperty(e.getKey()); + } else { + System.setProperty(e.getKey(), e.getValue()); + } + } + } +} diff --git a/core/launcher/pom.xml b/core/launcher/pom.xml index 2d7267d384642..1c518816d2a6b 100644 --- a/core/launcher/pom.xml +++ b/core/launcher/pom.xml @@ -22,6 +22,12 @@ + + + src/main/resources + true + + org.apache.maven.plugins diff --git a/core/launcher/src/main/java/io/quarkus/launcher/JBangDevModeLauncher.java b/core/launcher/src/main/java/io/quarkus/launcher/JBangDevModeLauncher.java new file mode 100644 index 0000000000000..b9159c941d24c --- /dev/null +++ b/core/launcher/src/main/java/io/quarkus/launcher/JBangDevModeLauncher.java @@ -0,0 +1,40 @@ +package io.quarkus.launcher; + +import java.util.HashMap; +import java.util.Map; + +import io.quarkus.bootstrap.BootstrapConstants; + +/** + * JBang dev mode entry point. Utilises the same shaded info as the IDE launcher, but JBang has a different + * launch path. + * + * Unlike the IDE launch use case in this case we have been able to store some info from the build phase, + * so we don't need as many tricks. + * + * The launcher module has all its dependencies shaded, so it is effectively self contained. This allows deployment time + * code to not leak into runtime code, as the launcher artifact is explicitly excluded from the production build via a + * hard coded exclusion. + */ +public class JBangDevModeLauncher { + + public static void main(String... 
args) { + System.clearProperty("quarkus.dev"); //avoid unknown config key warnings + final ClassLoader originalCl = Thread.currentThread().getContextClassLoader(); + try { + + Map context = new HashMap<>(); + context.put("args", args); + RuntimeLaunchClassLoader loader = new RuntimeLaunchClassLoader(JBangDevModeLauncher.class.getClassLoader()); + Thread.currentThread().setContextClassLoader(loader); + + Class launcher = loader.loadClass("io.quarkus.bootstrap.jbang.JBangDevModeLauncherImpl"); + launcher.getDeclaredMethod("main", String[].class).invoke(null, (Object) args); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + System.clearProperty(BootstrapConstants.SERIALIZED_APP_MODEL); + Thread.currentThread().setContextClassLoader(originalCl); + } + } +} diff --git a/core/launcher/src/main/java/io/quarkus/launcher/JBangIntegration.java b/core/launcher/src/main/java/io/quarkus/launcher/JBangIntegration.java new file mode 100644 index 0000000000000..9d548efe5e74f --- /dev/null +++ b/core/launcher/src/main/java/io/quarkus/launcher/JBangIntegration.java @@ -0,0 +1,135 @@ +package io.quarkus.launcher; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import io.quarkus.bootstrap.BootstrapConstants; + +public class JBangIntegration { + + public static final String CONFIG = "//Q:CONFIG"; + + public static Map postBuild(Path appClasses, Path pomFile, List> repositories, + List> originalDeps, + List comments, boolean nativeImage) throws IOException { + //dev mode takes a very different path + if (Boolean.getBoolean("quarkus.dev")) { + System.clearProperty("quarkus.dev"); //avoid unknown config key warnings + HashMap files = new HashMap<>(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + DataOutputStream dataOutputStream = new DataOutputStream(outputStream); + //pom file contents + String pomContents = String.join("\n", Files.readAllLines(pomFile)); + dataOutputStream.writeUTF(pomContents); + //Source file locations + dataOutputStream.writeUTF(appClasses.toAbsolutePath().toString()); + dataOutputStream.writeUTF(System.getProperty("jbang.source")); + dataOutputStream.writeInt(originalDeps.size()); + for (Map.Entry i : originalDeps) { + dataOutputStream.writeUTF(i.getKey()); + dataOutputStream.writeUTF(i.getValue().toAbsolutePath().toString()); + } + dataOutputStream.close(); + files.put("jbang-dev.dat", outputStream.toByteArray()); + HashMap ret = new HashMap<>(); + ret.put("files", files); + ret.put("main-class", "io.quarkus.launcher.JBangDevModeLauncher"); + return ret; + } + + for (String comment : comments) { + //we allow config to be provided via //Q:CONFIG name=value + if (comment.startsWith(CONFIG)) { + String conf = comment.substring(CONFIG.length()).trim(); + int equals = conf.indexOf("="); + if (equals == -1) { + throw new RuntimeException("invalid config " + comment); + } + System.setProperty(conf.substring(0, equals), conf.substring(equals + 1)); + } + } + + //huge hack + //jbang does not consider the pom when resolving dependencies + //so can get the wrong version of asm + //we remove it from the deps list so we can use the correct one + List> dependencies = new ArrayList<>(); + for (Map.Entry i : originalDeps) { + if 
(!i.getKey().startsWith("org.ow2.asm:asm:")) { + dependencies.add(i); + } + } + + ClassLoader old = Thread.currentThread().getContextClassLoader(); + try { + RuntimeLaunchClassLoader loader = new RuntimeLaunchClassLoader( + new ClassLoader(JBangIntegration.class.getClassLoader()) { + @Override + public Class loadClass(String name) throws ClassNotFoundException { + return loadClass(name, false); + } + + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + if (name.startsWith("org.") && !(name.startsWith("org.xml.") || name.startsWith("org.w3c.") + || name.startsWith("org.jboss."))) { + //jbang has some but not all of the maven resolver classes we need on its + //class path. These all start with org. so we filter them out to make sure + //we get a complete class path + throw new ClassNotFoundException(); + } + return super.loadClass(name, resolve); + } + + @Override + public URL getResource(String name) { + if (name.startsWith("org/") && !(name.startsWith("org/xml/") || name.startsWith("org/w3c/") + || name.startsWith("org/jboss/"))) { + //jbang has some but not all of the maven resolver classes we need on its + //class path. These all start with org. so we filter them out to make sure + //we get a complete class path + return null; + } + return super.getResource(name); + } + + @Override + public Enumeration getResources(String name) throws IOException { + if (name.startsWith("org/") && !(name.startsWith("org/xml/") || name.startsWith("org/w3c/") + || name.startsWith("org/jboss/"))) { + //jbang has some but not all of the maven resolver classes we need on its + //class path. These all start with org. so we filter them out to make sure + //we get a complete class path + return Collections.emptyEnumeration(); + } + return super.getResources(name); + } + }); + Thread.currentThread().setContextClassLoader(loader); + Class launcher = loader.loadClass("io.quarkus.bootstrap.jbang.JBangBuilderImpl"); + return (Map) launcher + .getDeclaredMethod("postBuild", Path.class, Path.class, List.class, List.class, boolean.class).invoke( + null, + appClasses, + pomFile, + repositories, + dependencies, + nativeImage); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + System.clearProperty(BootstrapConstants.SERIALIZED_APP_MODEL); + Thread.currentThread().setContextClassLoader(old); + } + } +} diff --git a/core/launcher/src/main/java/io/quarkus/launcher/QuarkusLauncher.java b/core/launcher/src/main/java/io/quarkus/launcher/QuarkusLauncher.java index d62c8154b75c8..7393aedcbfabf 100644 --- a/core/launcher/src/main/java/io/quarkus/launcher/QuarkusLauncher.java +++ b/core/launcher/src/main/java/io/quarkus/launcher/QuarkusLauncher.java @@ -1,15 +1,13 @@ package io.quarkus.launcher; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; +import java.io.Closeable; +import java.net.JarURLConnection; +import java.net.URI; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Enumeration; import java.util.HashMap; import java.util.Map; -import java.util.function.Consumer; import io.quarkus.bootstrap.BootstrapConstants; @@ -25,7 +23,8 @@ */ public class QuarkusLauncher { - public static void launch(String callingClass, String quarkusApplication, Consumer exitHandler, String... args) { + public static Closeable launch(String callingClass, String quarkusApplication, String... 
args) { + final ClassLoader originalCl = Thread.currentThread().getContextClassLoader(); try { String classResource = callingClass.replace(".", "/") + ".class"; URL resource = Thread.currentThread().getContextClassLoader().getResource(classResource); @@ -33,7 +32,15 @@ public static void launch(String callingClass, String quarkusApplication, Consum path = path.substring(0, path.length() - classResource.length()); URL newResource = new URL(resource.getProtocol(), resource.getHost(), resource.getPort(), path); - Path appClasses = Paths.get(newResource.toURI()); + URI uri = newResource.toURI(); + Path appClasses; + if ("jar".equals(uri.getScheme())) { + JarURLConnection connection = (JarURLConnection) uri.toURL().openConnection(); + connection.setDefaultUseCaches(false); + appClasses = Paths.get(connection.getJarFileURL().toURI()); + } else { + appClasses = Paths.get(uri); + } if (quarkusApplication != null) { System.setProperty("quarkus.package.main-class", quarkusApplication); } @@ -42,103 +49,16 @@ public static void launch(String callingClass, String quarkusApplication, Consum context.put("app-classes", appClasses); context.put("args", args); - IDEClassLoader loader = new IDEClassLoader(QuarkusLauncher.class.getClassLoader()); + RuntimeLaunchClassLoader loader = new RuntimeLaunchClassLoader(QuarkusLauncher.class.getClassLoader()); Thread.currentThread().setContextClassLoader(loader); Class launcher = loader.loadClass("io.quarkus.bootstrap.IDELauncherImpl"); - launcher.getDeclaredMethod("launch", Path.class, Map.class).invoke(null, appClasses, context); - + return (Closeable) launcher.getDeclaredMethod("launch", Path.class, Map.class).invoke(null, appClasses, context); } catch (Exception e) { throw new RuntimeException(e); } finally { System.clearProperty(BootstrapConstants.SERIALIZED_APP_MODEL); - } - } - - public static class IDEClassLoader extends ClassLoader { - - static { - registerAsParallelCapable(); - } - - public IDEClassLoader(ClassLoader parent) { - super(parent); - } - - @Override - protected Class findClass(String name) throws ClassNotFoundException { - String resourceName = name.replace(".", "/") + ".class"; - try { - try (InputStream is = getResourceAsStream(resourceName)) { - if (is == null) { - throw new ClassNotFoundException(name); - } - definePackage(name); - byte[] buf = new byte[1024]; - int r; - ByteArrayOutputStream out = new ByteArrayOutputStream(); - while ((r = is.read(buf)) > 0) { - out.write(buf, 0, r); - } - byte[] bytes = out.toByteArray(); - - return defineClass(name, bytes, 0, bytes.length); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - - } - - private void definePackage(String name) { - final String pkgName = getPackageNameFromClassName(name); - if ((pkgName != null) && getPackage(pkgName) == null) { - synchronized (getClassLoadingLock(pkgName)) { - if (getPackage(pkgName) == null) { - // this could certainly be improved to use the actual manifest - definePackage(pkgName, null, null, null, null, null, null, null); - } - } - } - } - - private String getPackageNameFromClassName(String className) { - final int index = className.lastIndexOf('.'); - if (index == -1) { - // we return null here since in this case no package is defined - // this is same behavior as Package.getPackage(clazz) exhibits - // when the class is in the default package - return null; - } - return className.substring(0, index); - } - - protected Class findClass(String moduleName, String name) { - try { - return findClass(name); - } catch (ClassNotFoundException e) 
{ - return null; - } - } - - protected URL findResource(String moduleName, String name) throws IOException { - return findResource(name); - } - - @Override - protected URL findResource(String name) { - if (!name.startsWith("/")) { - name = "/" + name; - } - return getParent().getResource("META-INF/ide-deps" + name + ".ide-launcher-res"); - } - - @Override - protected Enumeration findResources(String name) throws IOException { - if (!name.startsWith("/")) { - name = "/" + name; - } - return getParent().getResources("META-INF/ide-deps" + name + ".ide-launcher-res"); + Thread.currentThread().setContextClassLoader(originalCl); } } } diff --git a/core/launcher/src/main/java/io/quarkus/launcher/RuntimeLaunchClassLoader.java b/core/launcher/src/main/java/io/quarkus/launcher/RuntimeLaunchClassLoader.java new file mode 100644 index 0000000000000..691afcc0f3c7b --- /dev/null +++ b/core/launcher/src/main/java/io/quarkus/launcher/RuntimeLaunchClassLoader.java @@ -0,0 +1,94 @@ +package io.quarkus.launcher; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.util.Enumeration; + +public class RuntimeLaunchClassLoader extends ClassLoader { + + static { + registerAsParallelCapable(); + } + + public RuntimeLaunchClassLoader(ClassLoader parent) { + super(parent); + } + + @Override + protected Class findClass(String name) throws ClassNotFoundException { + String resourceName = name.replace(".", "/") + ".class"; + try { + try (InputStream is = getResourceAsStream(resourceName)) { + if (is == null) { + throw new ClassNotFoundException(name); + } + definePackage(name); + byte[] buf = new byte[1024]; + int r; + ByteArrayOutputStream out = new ByteArrayOutputStream(); + while ((r = is.read(buf)) > 0) { + out.write(buf, 0, r); + } + byte[] bytes = out.toByteArray(); + + return defineClass(name, bytes, 0, bytes.length); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + + } + + private void definePackage(String name) { + final String pkgName = getPackageNameFromClassName(name); + if ((pkgName != null) && getPackage(pkgName) == null) { + synchronized (getClassLoadingLock(pkgName)) { + if (getPackage(pkgName) == null) { + // this could certainly be improved to use the actual manifest + definePackage(pkgName, null, null, null, null, null, null, null); + } + } + } + } + + private String getPackageNameFromClassName(String className) { + final int index = className.lastIndexOf('.'); + if (index == -1) { + // we return null here since in this case no package is defined + // this is same behavior as Package.getPackage(clazz) exhibits + // when the class is in the default package + return null; + } + return className.substring(0, index); + } + + protected Class findClass(String moduleName, String name) { + try { + return findClass(name); + } catch (ClassNotFoundException e) { + return null; + } + } + + protected URL findResource(String moduleName, String name) throws IOException { + return findResource(name); + } + + @Override + protected URL findResource(String name) { + if (!name.startsWith("/")) { + name = "/" + name; + } + return getParent().getResource("META-INF/ide-deps" + name + ".ide-launcher-res"); + } + + @Override + protected Enumeration findResources(String name) throws IOException { + if (!name.startsWith("/")) { + name = "/" + name; + } + return getParent().getResources("META-INF/ide-deps" + name + ".ide-launcher-res"); + } +} diff --git a/core/launcher/src/main/resources/META-INF/jbang-integration.list 
b/core/launcher/src/main/resources/META-INF/jbang-integration.list new file mode 100644 index 0000000000000..8138163fea18c --- /dev/null +++ b/core/launcher/src/main/resources/META-INF/jbang-integration.list @@ -0,0 +1,3 @@ +#This integration allows JBang to run Quarkus + +io.quarkus.launcher.JBangIntegration #the integration class \ No newline at end of file diff --git a/core/pom.xml b/core/pom.xml index 0106a397747e4..8bff7ca665180 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,16 +21,6 @@ test-extension devmode-spi launcher + class-change-agent - - - - - maven-compiler-plugin - - true - - - - diff --git a/core/processor/pom.xml b/core/processor/pom.xml index 91ee51d8339c6..244d5c178727a 100644 --- a/core/processor/pom.xml +++ b/core/processor/pom.xml @@ -8,7 +8,6 @@ io.quarkus quarkus-core-parent 999-SNAPSHOT - ../ quarkus-extension-processor @@ -56,6 +55,11 @@ + + org.jboss.logmanager + jboss-logmanager-embedded + test + diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java b/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java index 198a9c20b1670..534ad8ac3a09d 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java @@ -38,9 +38,10 @@ final public class Constants { public static final String COMMON = "common"; public static final String RUNTIME = "runtime"; public static final String DEPLOYMENT = "deployment"; + public static final String CONFIG = "config"; public static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^.+[\\.$](\\w+)$"); - public static final Pattern PKG_PATTERN = Pattern.compile("^io\\.quarkus\\.(\\w+)\\.?(\\w+)?\\.?(\\w+)?"); + public static final Pattern PKG_PATTERN = Pattern.compile("^io\\.quarkus\\.(\\w+)\\.?(\\w+)?\\.?(\\w+)?\\.?(\\w+)?"); public static final String INSTANCE_SYM = "__instance"; public static final String QUARKUS = "quarkus"; @@ -80,7 +81,7 @@ final public class Constants { "%n%s Configuration property fixed at build time - All other configuration properties are overridable at runtime", CONFIG_PHASE_BUILD_TIME_ILLUSTRATION); - public static final String DURATION_FORMAT_NOTE = "\n[NOTE]" + + public static final String DURATION_FORMAT_NOTE = "\nifndef::no-duration-note[]\n[NOTE]" + "\n[[" + DURATION_NOTE_ANCHOR + "]]\n" + ".About the Duration format\n" + "====\n" + @@ -91,7 +92,8 @@ final public class Constants { "You can also provide duration values starting with a number.\n" + "In this case, if the value consists only of a number, the converter treats the value as seconds.\n" + "Otherwise, `PT` is implicitly prepended to the value to obtain a standard `java.time.Duration` format.\n" + - "====\n"; + "====\n" + + "endif::no-duration-note[]\n"; public static final String MEMORY_SIZE_FORMAT_NOTE = "\n[NOTE]" + "\n[[" + MEMORY_SIZE_NOTE_ANCHOR + "]]\n" + diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java b/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java index e2e63846ac68b..cc7c803eed793 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java @@ -78,7 +78,7 @@ public class ExtensionAnnotationProcessor extends AbstractProcessor { private final ConfigDocItemScanner configDocItemScanner = new ConfigDocItemScanner(); private 
final Set generatedAccessors = new ConcurrentHashMap().keySet(Boolean.TRUE); private final Set generatedJavaDocs = new ConcurrentHashMap().keySet(Boolean.TRUE); - private final boolean generateDocs = !Boolean.getBoolean("skipDocs"); + private final boolean generateDocs = !(Boolean.getBoolean("skipDocs") || Boolean.getBoolean("quickly")); public ExtensionAnnotationProcessor() { } @@ -100,11 +100,15 @@ public SourceVersion getSupportedSourceVersion() { @Override public boolean process(Set annotations, RoundEnvironment roundEnv) { - doProcess(annotations, roundEnv); - if (roundEnv.processingOver()) { - doFinish(); + try { + doProcess(annotations, roundEnv); + if (roundEnv.processingOver()) { + doFinish(); + } + return true; + } finally { + JDeparser.dropCaches(); } - return true; } @Override diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java index e437ec25b21c8..97133943a7890 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java @@ -19,7 +19,7 @@ import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.stringifyType; import java.io.IOException; -import java.nio.file.Path; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -62,14 +62,16 @@ class ConfigDoItemFinder { private final Properties javaDocProperties; private final Map configGroupQualifiedNameToTypeElementMap; private final FsMap allConfigurationGroups; + private final FsMap allConfigurationRoots; public ConfigDoItemFinder(Set configRoots, Map configGroupQualifiedNameToTypeElementMap, - Properties javaDocProperties, Path allConfigurationGroupsDir) { + Properties javaDocProperties, FsMap allConfigurationGroups, FsMap allConfigurationRoots) { this.configRoots = configRoots; this.configGroupQualifiedNameToTypeElementMap = configGroupQualifiedNameToTypeElementMap; this.javaDocProperties = javaDocProperties; - this.allConfigurationGroups = new FsMap(allConfigurationGroupsDir); + this.allConfigurationGroups = allConfigurationGroups; + this.allConfigurationRoots = allConfigurationRoots; } /** @@ -77,7 +79,6 @@ public ConfigDoItemFinder(Set configRoots, * Scan configuration group first and record them in a properties file as they can be shared across * different modules. 
* - * @param allConfigurationGroups */ ScannedConfigDocsItemHolder findInMemoryConfigurationItems() throws IOException { @@ -97,6 +98,7 @@ ScannedConfigDocsItemHolder findInMemoryConfigurationItems() throws IOException final List configDocItems = recursivelyFindConfigItems(element, rootName, rootName, configPhase, false, sectionLevel, true); holder.addConfigRootItems(configRootInfo, configDocItems); + allConfigurationRoots.put(configRootInfo.getClazz().toString(), OBJECT_MAPPER.writeValueAsString(configDocItems)); } return holder; @@ -109,6 +111,27 @@ private List recursivelyFindConfigItems(Element element, String r ConfigPhase configPhase, boolean withinAMap, int sectionLevel, boolean generateSeparateConfigGroupDocsFiles) throws JsonProcessingException { List configDocItems = new ArrayList<>(); + TypeElement asTypeElement = (TypeElement) element; + TypeMirror superType = asTypeElement.getSuperclass(); + if (superType.getKind() != TypeKind.NONE) { + String key = superType.toString(); + String rawConfigItems = allConfigurationGroups.get(key); + if (rawConfigItems == null) { + rawConfigItems = allConfigurationRoots.get(key); + } + final List superTypeConfigItems; + if (rawConfigItems == null) { // element not yet scanned + Element superElement = ((DeclaredType) superType).asElement(); + superTypeConfigItems = recursivelyFindConfigItems(superElement, rootName, parentName, + configPhase, withinAMap, sectionLevel, generateSeparateConfigGroupDocsFiles); + } else { + superTypeConfigItems = OBJECT_MAPPER.readValue(rawConfigItems, LIST_OF_CONFIG_ITEMS_TYPE_REF); + } + + configDocItems.addAll(superTypeConfigItems); + + } + for (Element enclosedElement : element.getEnclosedElements()) { if (!enclosedElement.getKind().isField()) { continue; @@ -207,7 +230,7 @@ private List recursivelyFindConfigItems(Element element, String r if (isConfigGroup(type)) { List groupConfigItems = readConfigGroupItems(configPhase, rootName, name, type, configSection, withinAMap, generateSeparateConfigGroupDocsFiles); - configDocItems.addAll(groupConfigItems); + DocGeneratorUtil.appendConfigItemsIntoExistingOnes(configDocItems, groupConfigItems); } else { final ConfigDocKey configDocKey = new ConfigDocKey(); configDocKey.setWithinAMap(withinAMap); @@ -217,7 +240,8 @@ private List recursivelyFindConfigItems(Element element, String r DeclaredType declaredType = (DeclaredType) typeMirror; TypeElement typeElement = (TypeElement) declaredType.asElement(); Name qualifiedName = typeElement.getQualifiedName(); - optional = qualifiedName.toString().startsWith(Optional.class.getName()); + optional = qualifiedName.toString().startsWith(Optional.class.getName()) + || qualifiedName.contentEquals(Map.class.getName()); list = qualifiedName.contentEquals(List.class.getName()) || qualifiedName.contentEquals(Set.class.getName()); @@ -230,7 +254,7 @@ private List recursivelyFindConfigItems(Element element, String r name += String.format(NAMED_MAP_CONFIG_ITEM_FORMAT, configDocMapKey); List groupConfigItems = readConfigGroupItems(configPhase, rootName, name, type, configSection, true, generateSeparateConfigGroupDocsFiles); - configDocItems.addAll(groupConfigItems); + DocGeneratorUtil.appendConfigItemsIntoExistingOnes(configDocItems, groupConfigItems); continue; } else { type = BACK_TICK + stringifyType(declaredType) + BACK_TICK; @@ -256,7 +280,7 @@ private List recursivelyFindConfigItems(Element element, String r configSection.setOptional(true); List groupConfigItems = readConfigGroupItems(configPhase, rootName, name, typeInString, 
configSection, withinAMap, generateSeparateConfigGroupDocsFiles); - configDocItems.addAll(groupConfigItems); + DocGeneratorUtil.appendConfigItemsIntoExistingOnes(configDocItems, groupConfigItems); continue; } else if ((typeInString.startsWith(List.class.getName()) || typeInString.startsWith(Set.class.getName()) @@ -287,6 +311,8 @@ private List recursivelyFindConfigItems(Element element, String r defaultValue = hyphenateEnumValue(defaultValue); } acceptedValues = extractEnumValues(declaredType, useHyphenateEnumValue); + } else if (isDurationType(declaredType) && !defaultValue.isEmpty()) { + defaultValue = DocGeneratorUtil.normalizeDurationValue(defaultValue); } } } @@ -361,6 +387,10 @@ private boolean isEnumType(TypeMirror realTypeMirror) { && ((DeclaredType) realTypeMirror).asElement().getKind() == ElementKind.ENUM; } + private boolean isDurationType(TypeMirror realTypeMirror) { + return realTypeMirror.toString().equals(Duration.class.getName()); + } + /** * Scan or parse configuration items of a given configuration group. *
<p>
diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java index 1e1a43e130293..f0cf97b161be1 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java @@ -5,7 +5,6 @@ import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.deriveConfigRootName; import java.io.IOException; -import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -32,15 +31,17 @@ final public class ConfigDocItemScanner { private final Map configGroupsToTypeElement = new HashMap<>(); private final FsMap allExtensionGeneratedDocs; - private final Path allConfigurationGroupsDir = Constants.GENERATED_DOCS_PATH - .resolve("all-configuration-groups-generated-doc"); + private final FsMap allConfigGroupGeneratedDocs; private final FsMultiMap configurationRootsParExtensionFileName; public ConfigDocItemScanner() { this.allExtensionGeneratedDocs = new FsMap(Constants.GENERATED_DOCS_PATH .resolve("all-configuration-roots-generated-doc")); + this.allConfigGroupGeneratedDocs = new FsMap(Constants.GENERATED_DOCS_PATH + .resolve("all-configuration-groups-generated-doc")); this.configurationRootsParExtensionFileName = new FsMultiMap(Constants.GENERATED_DOCS_PATH .resolve("extensions-configuration-roots-list")); + } /** @@ -108,7 +109,7 @@ public Set scanExtensionsConfigurationItems(Properties Set configDocGeneratedOutputs = new HashSet<>(); final ConfigDoItemFinder configDoItemFinder = new ConfigDoItemFinder(configRoots, configGroupsToTypeElement, - javaDocProperties, allConfigurationGroupsDir); + javaDocProperties, allConfigGroupGeneratedDocs, allExtensionGeneratedDocs); final ScannedConfigDocsItemHolder inMemoryScannedItemsHolder = configDoItemFinder.findInMemoryConfigurationItems(); if (!inMemoryScannedItemsHolder.isEmpty()) { diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtil.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtil.java index d52d4f9ba9006..e18488139ceba 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtil.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtil.java @@ -257,6 +257,17 @@ static String hyphenateEnumValue(String orig) { return target.toString(); } + static String normalizeDurationValue(String value) { + if (!value.isEmpty() && Character.isDigit(value.charAt(value.length() - 1))) { + try { + value = Integer.parseInt(value) + "S"; + } catch (NumberFormatException ignore) { + } + } + value = value.toUpperCase(Locale.ROOT); + return value; + } + static String joinAcceptedValues(List acceptedValues) { if (acceptedValues == null || acceptedValues.isEmpty()) { return ""; @@ -293,28 +304,29 @@ public static String computeExtensionDocFileName(String configRoot) { extensionNameBuilder.append(configRoot); } else { String extensionName = matcher.group(1); - final String subgroup = matcher.group(2); extensionNameBuilder.append(Constants.QUARKUS); extensionNameBuilder.append(Constants.DASH); if (Constants.DEPLOYMENT.equals(extensionName) || Constants.RUNTIME.equals(extensionName)) { extensionNameBuilder.append(CORE); - } else if (subgroup != null && 
!Constants.DEPLOYMENT.equals(subgroup) - && !Constants.RUNTIME.equals(subgroup) && !Constants.COMMON.equals(subgroup) - && subgroup.matches(Constants.DIGIT_OR_LOWERCASE)) { + } else { extensionNameBuilder.append(extensionName); - extensionNameBuilder.append(Constants.DASH); - extensionNameBuilder.append(subgroup); - - final String qualifier = matcher.group(3); - if (qualifier != null && !Constants.DEPLOYMENT.equals(qualifier) - && !Constants.RUNTIME.equals(qualifier) && !Constants.COMMON.equals(qualifier) - && qualifier.matches(Constants.DIGIT_OR_LOWERCASE)) { + for (int i = 2; i <= matcher.groupCount(); i++) { + String subgroup = matcher.group(i); + if (Constants.DEPLOYMENT.equals(subgroup) + || Constants.RUNTIME.equals(subgroup) + || Constants.COMMON.equals(subgroup) + || !subgroup.matches(Constants.DIGIT_OR_LOWERCASE)) { + break; + } + if (i > 3 && Constants.CONFIG.equals(subgroup)) { + // this is a bit dark magic but we have config packages as valid extension names + // and config packages where the configuration is stored + break; + } extensionNameBuilder.append(Constants.DASH); - extensionNameBuilder.append(qualifier); + extensionNameBuilder.append(matcher.group(i)); } - } else { - extensionNameBuilder.append(extensionName); } } @@ -441,7 +453,7 @@ private static boolean mergeSectionIntoPreviousExistingConfigItems(ConfigDocSect ConfigDocSection configDocSection = configDocItem.getConfigDocSection(); if (configDocSection.equals(section)) { - configDocSection.addConfigDocItems(section.getConfigDocItems()); + appendConfigItemsIntoExistingOnes(configDocSection.getConfigDocItems(), section.getConfigDocItems()); return true; } else { boolean configSectionMerged = mergeSectionIntoPreviousExistingConfigItems(section, diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java index 105ef8a76ead5..ac6e2260cc2fe 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java @@ -91,7 +91,8 @@ public void format(Writer writer, ConfigDocKey configDocKey) throws IOException @Override public void format(Writer writer, ConfigDocSection configDocSection) throws IOException { if (configDocSection.isShowSection()) { - String anchor = anchorPrefix + getAnchor(configDocSection.getName()); + String anchor = anchorPrefix + + getAnchor(configDocSection.getName() + Constants.DASH + configDocSection.getSectionDetailsTitle()); String sectionTitle = String.format(SECTION_TITLE, anchor, anchor, configDocSection.getSectionDetailsTitle()); final String sectionRow = String.format(TABLE_SECTION_ROW_FORMAT, sectionTitle, configDocSection.isOptional() ? 
"This configuration section is optional" : Constants.EMPTY); diff --git a/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtilTest.java b/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtilTest.java index d27fb8fefc25e..5266429f28734 100644 --- a/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtilTest.java +++ b/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/DocGeneratorUtilTest.java @@ -9,6 +9,7 @@ import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.computeExtensionDocFileName; import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.deriveConfigRootName; import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.getJavaDocSiteLink; +import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.normalizeDurationValue; import static org.junit.jupiter.api.Assertions.assertEquals; import java.math.BigInteger; @@ -354,4 +355,16 @@ public void derivingConfigRootNameTestCase() { actual = deriveConfigRootName(simpleClassName, ConfigPhase.BUILD_TIME); assertEquals("quarkus.root-name", actual); } + + @Test + public void normalizeDurationValueTest() { + assertEquals("", normalizeDurationValue("")); + assertEquals("1S", normalizeDurationValue("1")); + assertEquals("1S", normalizeDurationValue("1S")); + assertEquals("1S", normalizeDurationValue("1s")); + + // values are not validated here + assertEquals("1_000", normalizeDurationValue("1_000")); + assertEquals("FOO", normalizeDurationValue("foo")); + } } diff --git a/core/runtime/pom.xml b/core/runtime/pom.xml index 6a2c968baaafb..1979550e4b4fc 100644 --- a/core/runtime/pom.xml +++ b/core/runtime/pom.xml @@ -8,7 +8,6 @@ io.quarkus quarkus-core-parent 999-SNAPSHOT - ../ quarkus-core @@ -72,7 +71,7 @@ org.jboss.slf4j - slf4j-jboss-logging + slf4j-jboss-logmanager org.graalvm.sdk @@ -81,6 +80,7 @@ org.graalvm.nativeimage svm + provided org.wildfly.common @@ -118,18 +118,22 @@ io.quarkus:quarkus-bootstrap-runner + io.smallrye.common:smallrye-common-io org.wildfly.common:wildfly-common org.graalvm.sdk:graal-sdk org.graalvm.nativeimage:svm io.quarkus:quarkus-bootstrap-core io.quarkus:quarkus-development-mode-spi io.quarkus:quarkus-bootstrap-app-model + io.quarkus:quarkus-bootstrap-maven-resolver + org.slf4j:slf4j-api + org.jboss.slf4j:slf4j-jboss-logmanager org.jboss.logmanager:jboss-logmanager-embedded org.jboss.logging:jboss-logging - org.ow2.asm:asm - org.ow2.asm:asm-commons org.apache.maven:maven-model + org.apache.maven.resolver:maven-resolver-api org.codehaus.plexus:plexus-utils + xml-apis:xml-apis + org.jboss.logging:jboss-logging + org.jboss.logmanager:jboss-logmanager-embedded + + org.wildfly.common:wildfly-common + + io.smallrye.common:smallrye-common-io + + org.codehaus.groovy:groovy + io.smallrye:smallrye-config javax.enterprise:cdi-api diff --git a/core/runtime/src/main/java/io/quarkus/runtime/Application.java b/core/runtime/src/main/java/io/quarkus/runtime/Application.java index 709b391261e0a..ac632628c9636 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/Application.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/Application.java @@ -228,16 +228,15 @@ public void awaitShutdown() { stateLock.lock(); try { for (;;) { - switch (state) { - case ST_STOPPED: - return; // all good - default: - try { - stateCond.await(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw 
interruptedOnAwaitStop(); - } + if (state == ST_STOPPED) { + return; // all good + } else { + try { + stateCond.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw interruptedOnAwaitStop(); + } } } } finally { diff --git a/core/runtime/src/main/java/io/quarkus/runtime/ApplicationLifecycleManager.java b/core/runtime/src/main/java/io/quarkus/runtime/ApplicationLifecycleManager.java index 2316305db6432..41e1e4643481c 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/ApplicationLifecycleManager.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/ApplicationLifecycleManager.java @@ -1,9 +1,12 @@ package io.quarkus.runtime; +import java.net.BindException; +import java.util.Locale; import java.util.Objects; import java.util.Set; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; +import java.util.function.BiConsumer; import java.util.function.Consumer; import javax.enterprise.context.spi.CreationalContext; @@ -12,12 +15,14 @@ import javax.enterprise.inject.spi.BeanManager; import javax.enterprise.inject.spi.CDI; +import org.eclipse.microprofile.config.spi.ConfigProviderResolver; import org.graalvm.nativeimage.ImageInfo; import org.jboss.logging.Logger; import org.wildfly.common.lock.Locks; -import com.oracle.svm.core.OS; - +import io.quarkus.bootstrap.runner.RunnerClassLoader; +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.runtime.configuration.ProfileManager; import io.quarkus.runtime.graal.DiagnosticPrinter; import sun.misc.Signal; import sun.misc.SignalHandler; @@ -29,7 +34,7 @@ * but nothing else. This class can be used to run both persistent applications that will run * till they receive a signal, and command mode applications that will run until the main method * returns. This class registers a shutdown hook to properly shut down the application, and handles - * exiting with the supplied exit code. + * exiting with the supplied exit code as well as any exception thrown when starting the application. * * This class should be used to run production and dev mode applications, while test use cases will * likely want to just use {@link Application} directly. @@ -39,9 +44,9 @@ */ public class ApplicationLifecycleManager { - private static volatile Consumer defaultExitCodeHandler = new Consumer() { + private static volatile BiConsumer defaultExitCodeHandler = new BiConsumer() { @Override - public void accept(Integer integer) { + public void accept(Integer integer, Throwable cause) { System.exit(integer); } }; @@ -63,18 +68,21 @@ private ApplicationLifecycleManager() { private static boolean hooksRegistered; private static boolean vmShuttingDown; + private static final boolean IS_WINDOWS = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); + private static final boolean IS_MAC = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("mac"); + public static void run(Application application, String... args) { run(application, null, null, args); } public static void run(Application application, Class quarkusApplication, - Consumer exitCodeHandler, String... args) { + BiConsumer exitCodeHandler, String... args) { stateLock.lock(); //in tests we might pass this method an already started application //in this case we don't shut it down at the end boolean alreadyStarted = application.isStarted(); if (!hooksRegistered) { - registerHooks(); + registerHooks(exitCodeHandler == null ? 
defaultExitCodeHandler : exitCodeHandler); hooksRegistered = true; } if (currentApplication != null && !shutdownRequested) { @@ -87,8 +95,10 @@ public static void run(Application application, Class ctx = beanManager.createCreationalContext(bean); instance = (QuarkusApplication) beanManager.getReference(bean, quarkusApplication, ctx); @@ -124,6 +134,7 @@ public static void run(Application application, Class' or via the Task Manager."); + } else if (IS_MAC) { + applicationLogger + .info("Use 'netstat -anv | grep " + port + "' to identify the process occupying the port."); + applicationLogger.info("You can try to kill it with 'kill -9 '."); + } else { + applicationLogger + .info("Use 'netstat -anop | grep " + port + "' to identify the process occupying the port."); + applicationLogger.info("You can try to kill it with 'kill -9 '."); + } + } else if (rootCause instanceof ConfigurationException) { + System.err.println(rootCause.getMessage()); + } else { + applicationLogger.errorv(rootCause, "Failed to start application (with profile {0})", + ProfileManager.getActiveProfile()); + } + } stateLock.lock(); try { shutdownRequested = true; @@ -144,45 +184,57 @@ public static void run(Application application, Class exitCodeHandler) { if (ImageInfo.inImageRuntimeCode() && System.getenv(DISABLE_SIGNAL_HANDLERS) == null) { - registerSignalHandlers(); + registerSignalHandlers(exitCodeHandler); } final ShutdownHookThread shutdownHookThread = new ShutdownHookThread(); Runtime.getRuntime().addShutdownHook(shutdownHookThread); } - private static void registerSignalHandlers() { - final SignalHandler handler = new SignalHandler() { + private static void registerSignalHandlers(final BiConsumer exitCodeHandler) { + final SignalHandler exitHandler = new SignalHandler() { @Override public void handle(Signal signal) { - System.exit(signal.getNumber() + 0x80); + exitCodeHandler.accept(signal.getNumber() + 0x80, null); } }; - final SignalHandler quitHandler = new SignalHandler() { + final SignalHandler diagnosticsHandler = new SignalHandler() { @Override public void handle(Signal signal) { DiagnosticPrinter.printDiagnostics(System.out); } }; - handleSignal("INT", handler); - handleSignal("TERM", handler); + handleSignal("INT", exitHandler); + handleSignal("TERM", exitHandler); // the HUP and QUIT signals are not defined for the Windows OpenJDK implementation: // https://hg.openjdk.java.net/jdk8u/jdk8u-dev/hotspot/file/7d5c800dae75/src/os/windows/vm/jvm_windows.cpp - if (OS.getCurrent() == OS.WINDOWS) { - handleSignal("BREAK", quitHandler); + if (IS_WINDOWS) { + handleSignal("BREAK", diagnosticsHandler); } else { - handleSignal("HUP", handler); - handleSignal("QUIT", quitHandler); + handleSignal("HUP", exitHandler); + handleSignal("QUIT", diagnosticsHandler); } } @@ -204,7 +256,7 @@ public static void exit() { exit(-1); } - public static Consumer getDefaultExitCodeHandler() { + public static BiConsumer getDefaultExitCodeHandler() { return defaultExitCodeHandler; } @@ -217,7 +269,7 @@ public static boolean isVmShuttingDown() { } /** - * Sets the default exit code handler for application run through the run method + * Sets the default exit code and exception handler for application run through the run method * that does not take an exit handler. 
* * By default this will just call System.exit, however this is not always @@ -225,11 +277,24 @@ public static boolean isVmShuttingDown() { * * @param defaultExitCodeHandler the new default exit handler */ - public static void setDefaultExitCodeHandler(Consumer defaultExitCodeHandler) { + public static void setDefaultExitCodeHandler(BiConsumer defaultExitCodeHandler) { Objects.requireNonNull(defaultExitCodeHandler); ApplicationLifecycleManager.defaultExitCodeHandler = defaultExitCodeHandler; } + /** + * Sets the default exit code handler for application run through the run method + * that does not take an exit handler. + * + * By default this will just call System.exit, however this is not always + * what is wanted. + * + * @param defaultExitCodeHandler the new default exit handler + */ + public static void setDefaultExitCodeHandler(Consumer defaultExitCodeHandler) { + setDefaultExitCodeHandler((exitCode, cause) -> defaultExitCodeHandler.accept(exitCode)); + } + /** * Signals that the application should exit with the given code. * @@ -288,6 +353,12 @@ public void run() { } finally { stateLock.unlock(); } + if (currentApplication.isStarted()) { + // On CLI apps, SIGINT won't call io.quarkus.runtime.Application#stop(), + // making the awaitShutdown() below block the application termination process + // It should be a noop if called twice anyway + currentApplication.stop(); + } currentApplication.awaitShutdown(); System.out.flush(); System.err.flush(); diff --git a/core/runtime/src/main/java/io/quarkus/runtime/ConfigChangeRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/ConfigChangeRecorder.java deleted file mode 100644 index b2027bc20181e..0000000000000 --- a/core/runtime/src/main/java/io/quarkus/runtime/ConfigChangeRecorder.java +++ /dev/null @@ -1,29 +0,0 @@ -package io.quarkus.runtime; - -import java.util.Map; -import java.util.Optional; - -import org.eclipse.microprofile.config.Config; -import org.eclipse.microprofile.config.ConfigProvider; -import org.jboss.logging.Logger; - -import io.quarkus.runtime.annotations.Recorder; - -@Recorder -public class ConfigChangeRecorder { - - private static final Logger log = Logger.getLogger(ConfigChangeRecorder.class); - - public void handleConfigChange(Map buildTimeConfig) { - Config configProvider = ConfigProvider.getConfig(); - for (Map.Entry entry : buildTimeConfig.entrySet()) { - Optional val = configProvider.getOptionalValue(entry.getKey(), String.class); - if (val.isPresent()) { - if (!val.get().equals(entry.getValue())) { - log.warn("Build time property cannot be changed at runtime. " + entry.getKey() + " was " - + entry.getValue() + " at build time and is now " + val.get()); - } - } - } - } -} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/ConfigConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/ConfigConfig.java new file mode 100644 index 0000000000000..0687eecab1f0e --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/ConfigConfig.java @@ -0,0 +1,30 @@ +package io.quarkus.runtime; + +import java.util.Optional; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +/** + * We don't really use this, because these are configurations for the config itself, so it causes a chicken / egg + * problem, but we have it so the configurations can be properly documented. + * + * Relocation of the Config configurations to the Quarkus namespace is done in ConfigUtils#configBuilder. 
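The two relocated keys can be supplied through any ordinary config source. A minimal sketch using system properties set before the application boots; the extra properties file and the parent profile name are hypothetical:

```java
import io.quarkus.runtime.Quarkus;

public class ConfiguredMain {

    public static void main(String... args) {
        // Both keys are relocated to their SmallRye counterparts by ConfigUtils#configBuilder.
        System.setProperty("quarkus.config.locations", "config/extra.properties"); // hypothetical extra file
        System.setProperty("quarkus.config.profile.parent", "common");             // fall back to the 'common' profile
        Quarkus.run(args);
    }
}
```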
+ */ +@ConfigRoot(name = ConfigItem.PARENT, phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public class ConfigConfig { + /** + * Additional config locations to be loaded with the Config. The configuration support multiple locations + * separated by a comma and each must represent a valid {@link java.net.URI}. + */ + @ConfigItem(name = "config.locations") + public Optional locations; + + /** + * Accepts a single configuration profile name. If a configuration property cannot be found in the current active + * profile, the config performs the same lookup in the profile set by this configuration. + */ + @ConfigItem(name = "config.profile.parent") + public Optional profileParent; +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/ExecutorRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/ExecutorRecorder.java index d5b0f1618c0be..372b4a0adbc93 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/ExecutorRecorder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/ExecutorRecorder.java @@ -54,7 +54,7 @@ public void run() { executor = devModeExecutor; Runtime.getRuntime().addShutdownHook(new Thread(shutdownTask, "Executor shutdown thread")); } else { - shutdownContext.addShutdownTask(shutdownTask); + shutdownContext.addLastShutdownTask(shutdownTask); executor = underlying; } if (threadPoolConfig.prefill) { diff --git a/core/runtime/src/main/java/io/quarkus/runtime/LaunchMode.java b/core/runtime/src/main/java/io/quarkus/runtime/LaunchMode.java index 008270167c6f5..2ad440b3641ad 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/LaunchMode.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/LaunchMode.java @@ -22,6 +22,13 @@ public boolean isDevOrTest() { return this != NORMAL; } + /** + * Returns true if the current launch is the server side of remote dev. + */ + public static boolean isRemoteDev() { + return (current() == DEVELOPMENT) && "true".equals(System.getenv("QUARKUS_LAUNCH_DEVMODE")); + } + private final String defaultProfile; LaunchMode(String defaultProfile) { diff --git a/core/runtime/src/main/java/io/quarkus/runtime/LiveReloadConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/LiveReloadConfig.java index 5eb3f93780a34..19b32bd5ece1d 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/LiveReloadConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/LiveReloadConfig.java @@ -1,5 +1,6 @@ package io.quarkus.runtime; +import java.time.Duration; import java.util.List; import java.util.Optional; @@ -10,6 +11,15 @@ @ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) public class LiveReloadConfig { + /** + * Whether or not Quarkus should disable it's ability to not do a full restart + * when changes to classes are compatible with JVM instrumentation. + * + * If this is set to false, Quarkus will always restart on changes and never perform class redefinition. + */ + @ConfigItem(defaultValue = "true") + boolean instrumentation; + /** * The names of additional resource files to watch for changes, triggering a reload on change. Directories are not * supported. 
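For the ExecutorRecorder change above, the difference between the two ShutdownContext methods matters: tasks added via addLastShutdownTask run after all regular shutdown tasks, which is presumably why the executor is now shut down last. A hypothetical recorder illustrating both (not code from this patch):

```java
import io.quarkus.runtime.ShutdownContext;
import io.quarkus.runtime.annotations.Recorder;

@Recorder
public class CacheRecorder { // illustrative name

    public void init(ShutdownContext shutdownContext) {
        // Regular shutdown task: may still rely on shared infrastructure such as the executor.
        shutdownContext.addShutdownTask(() -> System.out.println("flushing cache"));
        // Last shutdown task: runs only after all regular shutdown tasks have completed.
        shutdownContext.addLastShutdownTask(() -> System.out.println("releasing low-level resources"));
    }
}
```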
@@ -28,4 +38,10 @@ public class LiveReloadConfig { */ @ConfigItem public Optional url; + + /** + * The amount of time to wait for a remote dev connect or reconnect + */ + @ConfigItem(defaultValue = "30s") + public Duration connectTimeout; } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/Quarkus.java b/core/runtime/src/main/java/io/quarkus/runtime/Quarkus.java index 904778df2e217..e6ecf8b5f4d11 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/Quarkus.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/Quarkus.java @@ -1,6 +1,8 @@ package io.quarkus.runtime; -import java.util.function.Consumer; +import java.io.Closeable; +import java.io.IOException; +import java.util.function.BiConsumer; import org.jboss.logging.Logger; import org.jboss.logmanager.LogManager; @@ -24,6 +26,8 @@ public class Quarkus { //WARNING: this is too early to inject a logger //private static final Logger log = Logger.getLogger(Quarkus.class); + private static Closeable LAUNCHED_FROM_IDE; + /** * Runs a quarkus application, that will run until the provided {@link QuarkusApplication} has completed. * @@ -46,10 +50,11 @@ public static void run(Class quarkusApplication, S * go into the QuarkusApplication. * * @param quarkusApplication The application to run, or null - * @param exitHandler The handler that is called with the exit code when the application has finished + * @param exitHandler The handler that is called with the exit code and any exception (if any) thrown when the application + * has finished * @param args The command line parameters */ - public static void run(Class quarkusApplication, Consumer exitHandler, + public static void run(Class quarkusApplication, BiConsumer exitHandler, String... args) { try { System.setProperty("java.util.logging.manager", LogManager.class.getName()); @@ -63,12 +68,11 @@ public static void run(Class quarkusApplication, C } catch (ClassNotFoundException e) { //ignore, this happens when running in dev mode } catch (Exception e) { - //TODO: exception mappers - Logger.getLogger(Quarkus.class).error("Error running Quarkus", e); if (exitHandler != null) { - exitHandler.accept(1); + exitHandler.accept(1, e); } else { - ApplicationLifecycleManager.getDefaultExitCodeHandler().accept(1); + Logger.getLogger(Quarkus.class).error("Error running Quarkus", e); + ApplicationLifecycleManager.getDefaultExitCodeHandler().accept(1, e); } return; } @@ -76,12 +80,11 @@ public static void run(Class quarkusApplication, C //dev mode path, i.e. launching from the IDE //this is not the quarkus:dev path as it will augment before //calling this method - launchFromIDE(quarkusApplication, exitHandler, args); + launchFromIDE(quarkusApplication, args); } - private static void launchFromIDE(Class quarkusApplication, Consumer exitHandler, - String... args) { + private static void launchFromIDE(Class quarkusApplication, String... args) { //some trickery, get the class that has invoked us, and use this to figure out the //classes root StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); @@ -90,8 +93,18 @@ private static void launchFromIDE(Class quarkusApp pos++; } String callingClass = stackTrace[pos].getClassName(); - QuarkusLauncher.launch(callingClass, quarkusApplication == null ? null : quarkusApplication.getName(), exitHandler, - args); + LAUNCHED_FROM_IDE = QuarkusLauncher.launch(callingClass, + quarkusApplication == null ? 
null : quarkusApplication.getName(), args); + } + + private static void terminateForIDE() { + if (LAUNCHED_FROM_IDE != null) { + try { + LAUNCHED_FROM_IDE.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } } /** @@ -122,6 +135,7 @@ public static void run(String... args) { * @param code The exit code. This may be overridden if an exception occurs on shutdown */ public static void asyncExit(int code) { + terminateForIDE(); ApplicationLifecycleManager.exit(code); } @@ -138,6 +152,7 @@ public static void asyncExit(int code) { * */ public static void asyncExit() { + terminateForIDE(); ApplicationLifecycleManager.exit(-1); } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/ShutdownContext.java b/core/runtime/src/main/java/io/quarkus/runtime/ShutdownContext.java index 54995c59efba7..126ddad2bad19 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/ShutdownContext.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/ShutdownContext.java @@ -1,5 +1,10 @@ package io.quarkus.runtime; +import java.io.Closeable; +import java.io.IOException; + +import org.jboss.logging.Logger; + /** * A context that can be passed into runtime recorders that allows for shutdown tasks to be added. * @@ -11,4 +16,26 @@ public interface ShutdownContext { // these are executed after all the ones add via addShutdownTask in the reverse order from which they were added void addLastShutdownTask(Runnable runnable); + + class CloseRunnable implements Runnable { + + static final Logger log = Logger.getLogger(ShutdownContext.class); + + private final Closeable closeable; + + public CloseRunnable(Closeable closeable) { + this.closeable = closeable; + } + + @Override + public void run() { + try { + if (closeable != null) { + closeable.close(); + } + } catch (IOException e) { + log.error("Failed to close " + closeable, e); + } + } + } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/TemplateHtmlBuilder.java b/core/runtime/src/main/java/io/quarkus/runtime/TemplateHtmlBuilder.java index 3062422ca8843..f7ea8a14a058a 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/TemplateHtmlBuilder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/TemplateHtmlBuilder.java @@ -58,6 +58,8 @@ public class TemplateHtmlBuilder { private static final String ANCHOR_TEMPLATE = "/%2$s"; + private static final String DESCRIPTION_TEMPLATE = "%1$s — %2$s"; + private static final String RESOURCE_TEMPLATE = "

%1$s

\n"; private static final String LIST_START = "
    \n"; @@ -67,6 +69,8 @@ public class TemplateHtmlBuilder { private static final String METHOD_IO = "
  • %1$s: %2$s
  • \n"; + private static final String LIST_ITEM = "
  • %s
  • \n"; + private static final String METHOD_END = "
\n" + ""; @@ -213,24 +217,34 @@ public TemplateHtmlBuilder noResourcesFound() { } public TemplateHtmlBuilder resourcePath(String title) { - return resourcePath(title, true, false); + return resourcePath(title, true, false, null); } public TemplateHtmlBuilder staticResourcePath(String title) { - return resourcePath(title, false, true); + return staticResourcePath(title, null); + } + + public TemplateHtmlBuilder staticResourcePath(String title, String description) { + return resourcePath(title, false, true, description); } public TemplateHtmlBuilder servletMapping(String title) { - return resourcePath(title, false, false); + return resourcePath(title, false, false, null); } - private TemplateHtmlBuilder resourcePath(String title, boolean withListStart, boolean withAnchor) { + private TemplateHtmlBuilder resourcePath(String title, boolean withListStart, boolean withAnchor, String description) { String content; if (withAnchor) { + if (title.startsWith("/")) { + title = title.substring(1); + } content = String.format(ANCHOR_TEMPLATE, title, escapeHtml(title)); } else { content = escapeHtml(title); } + if (description != null && !description.isEmpty()) { + content = String.format(DESCRIPTION_TEMPLATE, content, description); + } result.append(String.format(RESOURCE_TEMPLATE, content)); if (withListStart) { result.append(LIST_START); @@ -253,6 +267,11 @@ public TemplateHtmlBuilder produces(String produces) { return this; } + public TemplateHtmlBuilder listItem(String content) { + result.append(String.format(LIST_ITEM, escapeHtml(content))); + return this; + } + public TemplateHtmlBuilder methodEnd() { result.append(METHOD_END); return this; @@ -268,6 +287,11 @@ public TemplateHtmlBuilder resourceEnd() { return this; } + public TemplateHtmlBuilder append(String html) { + result.append(html); + return this; + } + @Override public String toString() { return result.append(HTML_TEMPLATE_END).toString(); @@ -284,12 +308,25 @@ private static String escapeHtml(final String bodyText) { .replace(">", ">"); } - private static String extractFirstLine(final String message) { - if (null == message) { - return ""; - } + public static String adjustRoot(String httpRoot, String basePath) { + //httpRoot can optionally end with a slash + //also some templates want the returned path to start with a / and some don't + //to make this work we check if the basePath starts with a / or not, and make sure we + //the return value follows the same pattern - String[] lines = message.split("\\r?\\n"); - return lines[0].trim(); + if (httpRoot.equals("/")) { + //leave it alone + return basePath; + } + if (basePath.startsWith("/")) { + if (!httpRoot.endsWith("/")) { + return httpRoot + basePath; + } + return httpRoot.substring(0, httpRoot.length() - 1) + basePath; + } + if (httpRoot.endsWith("/")) { + return httpRoot.substring(1) + basePath; + } + return httpRoot.substring(1) + "/" + basePath; } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/TlsConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/TlsConfig.java new file mode 100644 index 0000000000000..eb75cfa60af9d --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/TlsConfig.java @@ -0,0 +1,25 @@ +package io.quarkus.runtime; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +/** + * Configuration class allowing to globally set TLS properties. 
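A sketch of how extension runtime code might consume this new global root; the class is hypothetical, and the property name quarkus.tls.trust-all is assumed from the default @ConfigRoot/@ConfigItem naming:

```java
import io.quarkus.runtime.TlsConfig;

public class TlsAwareClientSupport { // hypothetical extension class

    public void apply(TlsConfig tlsConfig) {
        if (tlsConfig.trustAll) {
            // An extension would typically relax certificate verification here,
            // e.g. by installing a trust-all TrustManager on its client.
            System.out.println("quarkus.tls.trust-all is enabled: server certificates will not be verified");
        }
    }
}
```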
+ */ +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public class TlsConfig { + + /** + * Enable trusting all certificates. Disable by default. + */ + @ConfigItem(defaultValue = "false") + public boolean trustAll; + + @Override + public String toString() { + return "TlsConfig{" + + "trustAll=" + trustAll + + '}'; + } +} \ No newline at end of file diff --git a/core/runtime/src/main/java/io/quarkus/runtime/annotations/IgnoreProperty.java b/core/runtime/src/main/java/io/quarkus/runtime/annotations/IgnoreProperty.java new file mode 100644 index 0000000000000..2adb486fc20e0 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/annotations/IgnoreProperty.java @@ -0,0 +1,16 @@ +package io.quarkus.runtime.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Indicates that a field or method should not be considered when attempting to construct a recorded object. + * The annotation is meant to be used on a field or a getter method. + * + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.FIELD }) +public @interface IgnoreProperty { +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/annotations/RecordableConstructor.java b/core/runtime/src/main/java/io/quarkus/runtime/annotations/RecordableConstructor.java index 925848610e2b7..8a649b648910b 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/annotations/RecordableConstructor.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/annotations/RecordableConstructor.java @@ -11,6 +11,8 @@ * The constructor parameters will be read from the fields of the object, and matched * to the constructor parameter names. * + * TODO: move this out of Quarkus core and into a tiny annotation-only module + * that could then be used outside of Quarkus (for example in RESTEasy Reactive) */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) diff --git a/core/runtime/src/main/java/io/quarkus/runtime/annotations/RelaxedValidation.java b/core/runtime/src/main/java/io/quarkus/runtime/annotations/RelaxedValidation.java new file mode 100644 index 0000000000000..13c01e642eee3 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/annotations/RelaxedValidation.java @@ -0,0 +1,17 @@ +package io.quarkus.runtime.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Indicates that a given recorded parameter should have relaxed validation. + * + * Normally if a field cannot be serialized to bytecode an exception will be thrown, + * if this annotation is present the field is simply ignored. 
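A hypothetical recorder and DTO showing where the two new annotations apply, based on the javadoc above: @IgnoreProperty excludes a field (or getter) from object recording, while @RelaxedValidation makes recording of a parameter skip non-serializable fields instead of failing:

```java
import io.quarkus.runtime.annotations.IgnoreProperty;
import io.quarkus.runtime.annotations.Recorder;
import io.quarkus.runtime.annotations.RelaxedValidation;

public class EndpointInfo { // illustrative object recorded into bytecode at build time

    public String path;

    @IgnoreProperty // never considered when the object is recorded
    public Object buildTimeOnlyHandle;
}

@Recorder
class EndpointRecorder { // illustrative recorder

    public void register(@RelaxedValidation EndpointInfo info) {
        // Fields of 'info' that cannot be serialized to bytecode are silently ignored
        // rather than failing the build.
    }
}
```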
+ */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.PARAMETER) +public @interface RelaxedValidation { +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ApplicationPropertiesConfigSource.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ApplicationPropertiesConfigSource.java deleted file mode 100644 index d8415f850e0bd..0000000000000 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ApplicationPropertiesConfigSource.java +++ /dev/null @@ -1,118 +0,0 @@ -package io.quarkus.runtime.configuration; - -import java.io.BufferedReader; -import java.io.Closeable; -import java.io.FileNotFoundException; -import java.io.IOError; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.Map; -import java.util.Properties; - -import io.smallrye.config.PropertiesConfigSource; - -/** - * A configuration source for {@code application.properties}. - */ -public abstract class ApplicationPropertiesConfigSource extends PropertiesConfigSource { - private static final long serialVersionUID = -4694780118527396798L; - - static final String APPLICATION_PROPERTIES = "application.properties"; - static final String MP_PROPERTIES = "META-INF/microprofile-config.properties"; - - ApplicationPropertiesConfigSource(InputStream is, int ordinal) { - super(readProperties(is), APPLICATION_PROPERTIES, ordinal); - } - - ApplicationPropertiesConfigSource(InputStream is, String nm, int ordinal) { - super(readProperties(is), nm, ordinal); - } - - private static Map readProperties(final InputStream is) { - if (is == null) { - return Collections.emptyMap(); - } - try (Closeable ignored = is) { - try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8)) { - try (BufferedReader br = new BufferedReader(isr)) { - final Properties properties = new Properties(); - properties.load(br); - return (Map) (Map) properties; - } - } - } catch (IOException e) { - throw new IOError(e); - } - } - - public static final class InJar extends ApplicationPropertiesConfigSource { - public InJar() { - super(openStream(), 250); - } - - private static InputStream openStream() { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - if (cl == null) { - cl = ApplicationPropertiesConfigSource.class.getClassLoader(); - } - InputStream is; - if (cl == null) { - is = ClassLoader.getSystemResourceAsStream(APPLICATION_PROPERTIES); - } else { - is = cl.getResourceAsStream(APPLICATION_PROPERTIES); - } - return is; - } - } - - /** - * Config that makes sure that the MP config in the application takes precedence over any other on the class path - */ - public static final class MpConfigInJar extends ApplicationPropertiesConfigSource { - public MpConfigInJar() { - super(openStream(), MP_PROPERTIES, 240); - } - - private static InputStream openStream() { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - if (cl == null) { - cl = ApplicationPropertiesConfigSource.class.getClassLoader(); - } - InputStream is; - if (cl == null) { - is = ClassLoader.getSystemResourceAsStream(MP_PROPERTIES); - } else { - is = cl.getResourceAsStream(MP_PROPERTIES); - } - return is; - } - } - - public static final class InFileSystem extends ApplicationPropertiesConfigSource { - public InFileSystem() { - super(openStream(), 
260); - } - - private static InputStream openStream() { - final Path path = Paths.get("config", APPLICATION_PROPERTIES); - if (Files.exists(path)) { - try { - return Files.newInputStream(path); - } catch (NoSuchFileException | FileNotFoundException e) { - return null; - } catch (IOException e) { - throw new IOError(e); - } - } else { - return null; - } - } - } -} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ApplicationPropertiesConfigSourceLoader.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ApplicationPropertiesConfigSourceLoader.java new file mode 100644 index 0000000000000..f5ac2de74bf0a --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ApplicationPropertiesConfigSourceLoader.java @@ -0,0 +1,59 @@ +package io.quarkus.runtime.configuration; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.microprofile.config.spi.ConfigSource; +import org.eclipse.microprofile.config.spi.ConfigSourceProvider; + +import io.smallrye.config.AbstractLocationConfigSourceLoader; +import io.smallrye.config.PropertiesConfigSource; + +public class ApplicationPropertiesConfigSourceLoader extends AbstractLocationConfigSourceLoader { + @Override + protected String[] getFileExtensions() { + return new String[] { "properties" }; + } + + @Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return new PropertiesConfigSource(url, ordinal); + } + + public static class InClassPath extends ApplicationPropertiesConfigSourceLoader implements ConfigSourceProvider { + @Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return super.loadConfigSource(url, 250); + } + + @Override + public List getConfigSources(final ClassLoader classLoader) { + return loadConfigSources("application.properties", classLoader); + } + + @Override + protected List tryFileSystem(final URI uri) { + return new ArrayList<>(); + } + } + + public static class InFileSystem extends ApplicationPropertiesConfigSourceLoader implements ConfigSourceProvider { + @Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return super.loadConfigSource(url, 260); + } + + @Override + public List getConfigSources(final ClassLoader classLoader) { + return loadConfigSources("config/application.properties", classLoader); + } + + @Override + protected List tryClassPath(final URI uri, final ClassLoader classLoader) { + return new ArrayList<>(); + } + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigChangeRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigChangeRecorder.java new file mode 100644 index 0000000000000..eb7a8e09bb58f --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigChangeRecorder.java @@ -0,0 +1,53 @@ +package io.quarkus.runtime.configuration; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.eclipse.microprofile.config.Config; +import org.eclipse.microprofile.config.ConfigProvider; +import org.jboss.logging.Logger; + +import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.runtime.configuration.ConfigurationRuntimeConfig.BuildTimeMismatchAtRuntime; + +@Recorder +public class ConfigChangeRecorder { + + private 
static final Logger log = Logger.getLogger(ConfigChangeRecorder.class); + + public void handleConfigChange(ConfigurationRuntimeConfig configurationConfig, Map buildTimeConfig) { + Config configProvider = ConfigProvider.getConfig(); + List mismatches = null; + for (Map.Entry entry : buildTimeConfig.entrySet()) { + Optional val = configProvider.getOptionalValue(entry.getKey(), String.class); + if (val.isPresent()) { + if (!val.get().equals(entry.getValue())) { + if (mismatches == null) { + mismatches = new ArrayList<>(); + } + mismatches.add( + " - " + entry.getKey() + " was '" + entry.getValue() + "' at build time and is now '" + val.get() + + "'"); + } + } + } + if (mismatches != null && !mismatches.isEmpty()) { + final String msg = "Build time property cannot be changed at runtime:\n" + + mismatches.stream().collect(Collectors.joining("\n")); + switch (configurationConfig.buildTimeMismatchAtRuntime) { + case fail: + throw new IllegalStateException(msg); + case warn: + log.warn(msg); + break; + default: + throw new IllegalStateException("Unexpected " + BuildTimeMismatchAtRuntime.class.getName() + ": " + + configurationConfig.buildTimeMismatchAtRuntime); + } + + } + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java index 808b71eb41548..5be6e70fbcb82 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java @@ -23,18 +23,15 @@ private ConfigDiagnostic() { public static void invalidValue(String name, IllegalArgumentException ex) { final String message = ex.getMessage(); - final String loggedMessage = String.format("An invalid value was given for configuration key \"%s\": %s", name, - message == null ? ex.toString() : message); - log.error(loggedMessage); + final String loggedMessage = message != null ? message + : String.format("An invalid value was given for configuration key \"%s\"", name); errorsMessages.add(loggedMessage); } public static void missingValue(String name, NoSuchElementException ex) { final String message = ex.getMessage(); - final String loggedMessage = String.format("Configuration key \"%s\" is required, but its value is empty/missing: %s", - name, - message == null ? ex.toString() : message); - log.error(loggedMessage); + final String loggedMessage = message != null ? 
message + : String.format("Configuration key \"%s\" is required, but its value is empty/missing", name); errorsMessages.add(loggedMessage); } @@ -49,7 +46,7 @@ public static void deprecated(String name) { public static void unknown(String name) { log.warnf( - "Unrecognized configuration key \"%s\" was provided; it will be ignored; verify that the dependency extension for this configuration is set or you did not make a typo", + "Unrecognized configuration key \"%s\" was provided; it will be ignored; verify that the dependency extension for this configuration is set or that you did not make a typo", name); } @@ -87,6 +84,12 @@ public static void resetError() { } public static String getNiceErrorMessage() { - return String.join("\n", errorsMessages); + StringBuilder b = new StringBuilder(); + for (String errorsMessage : errorsMessages) { + b.append(" - "); + b.append(errorsMessage); + b.append(System.lineSeparator()); + } + return b.toString(); } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigInstantiator.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigInstantiator.java index 96b763be8d38c..495d9df9074c1 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigInstantiator.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigInstantiator.java @@ -65,7 +65,7 @@ private static void handleObject(String prefix, Object o, SmallRyeConfig config) return; } for (Field field : cls.getDeclaredFields()) { - if (Modifier.isFinal(field.getModifiers())) { + if (field.isSynthetic() || Modifier.isFinal(field.getModifiers())) { continue; } field.setAccessible(true); diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigUtils.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigUtils.java index 273525ababad2..c8628317f3ee7 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigUtils.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigUtils.java @@ -1,37 +1,43 @@ package io.quarkus.runtime.configuration; -import java.io.FileNotFoundException; +import static io.smallrye.config.AbstractLocationConfigSourceFactory.SMALLRYE_LOCATIONS; +import static io.smallrye.config.DotEnvConfigSourceProvider.dotEnvSources; +import static io.smallrye.config.ProfileConfigSourceInterceptor.SMALLRYE_PROFILE; +import static io.smallrye.config.ProfileConfigSourceInterceptor.SMALLRYE_PROFILE_PARENT; +import static io.smallrye.config.PropertiesConfigSourceProvider.classPathSources; +import static io.smallrye.config.SmallRyeConfigBuilder.META_INF_MICROPROFILE_CONFIG_PROPERTIES; +import static io.smallrye.config.SmallRyeConfigBuilder.WEB_INF_MICROPROFILE_CONFIG_PROPERTIES; + import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Serializable; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.Path; -import java.nio.file.Paths; +import java.net.URL; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; -import java.util.Locale; import java.util.Map; -import java.util.Properties; +import java.util.OptionalInt; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; -import java.util.concurrent.ConcurrentHashMap; import 
java.util.function.IntFunction; -import java.util.regex.Pattern; import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.ConfigSourceProvider; import org.jboss.logging.Logger; +import io.smallrye.config.ConfigSourceInterceptor; +import io.smallrye.config.ConfigSourceInterceptorContext; +import io.smallrye.config.ConfigSourceInterceptorFactory; +import io.smallrye.config.DotEnvConfigSourceProvider; +import io.smallrye.config.EnvConfigSource; +import io.smallrye.config.Priorities; +import io.smallrye.config.RelocateConfigSourceInterceptor; import io.smallrye.config.SmallRyeConfigBuilder; +import io.smallrye.config.SysPropConfigSource; +import io.smallrye.config.common.utils.ConfigSourceUtil; /** * @@ -67,26 +73,38 @@ public static SmallRyeConfigBuilder configBuilder(final boolean runTime) { */ public static SmallRyeConfigBuilder configBuilder(final boolean runTime, final boolean addDiscovered) { final SmallRyeConfigBuilder builder = new SmallRyeConfigBuilder(); - final ApplicationPropertiesConfigSource.InFileSystem inFileSystem = new ApplicationPropertiesConfigSource.InFileSystem(); - final ApplicationPropertiesConfigSource.InJar inJar = new ApplicationPropertiesConfigSource.InJar(); - final ApplicationPropertiesConfigSource.MpConfigInJar mpConfig = new ApplicationPropertiesConfigSource.MpConfigInJar(); - builder.withSources(inFileSystem, inJar, mpConfig, new DotEnvConfigSource()); - builder.withProfile(ProfileManager.getActiveProfile()); - builder.addDefaultInterceptors(); + builder.withDefaultValue(SMALLRYE_PROFILE, ProfileManager.getActiveProfile()); + + final Map relocations = new HashMap<>(); + relocations.put(SMALLRYE_LOCATIONS, "quarkus.config.locations"); + relocations.put(SMALLRYE_PROFILE_PARENT, "quarkus.config.profile.parent"); + // Override the priority, because of the ProfileConfigSourceInterceptor and profile.parent. + builder.withInterceptorFactories(new ConfigSourceInterceptorFactory() { + @Override + public ConfigSourceInterceptor getInterceptor(final ConfigSourceInterceptorContext context) { + return new RelocateConfigSourceInterceptor(relocations); + } + + @Override + public OptionalInt getPriority() { + return OptionalInt.of(Priorities.LIBRARY + 600 - 5); + } + }); + final ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + builder.withSources(new ApplicationPropertiesConfigSourceLoader.InFileSystem().getConfigSources(classLoader)); + builder.withSources(new ApplicationPropertiesConfigSourceLoader.InClassPath().getConfigSources(classLoader)); + builder.addDefaultInterceptors(); if (runTime) { builder.addDefaultSources(); + builder.withSources(dotEnvSources(classLoader)); } else { final List sources = new ArrayList<>(); - sources.addAll( - new QuarkusPropertiesConfigSourceProvider("META-INF/microprofile-config.properties", true, classLoader) - .getConfigSources(classLoader)); - // required by spec... 
- sources.addAll( - new QuarkusPropertiesConfigSourceProvider("WEB-INF/classes/META-INF/microprofile-config.properties", true, - classLoader).getConfigSources(classLoader)); - sources.add(new EnvConfigSource()); - sources.add(new SysPropConfigSource()); + sources.addAll(classPathSources(META_INF_MICROPROFILE_CONFIG_PROPERTIES, classLoader)); + sources.addAll(classPathSources(WEB_INF_MICROPROFILE_CONFIG_PROPERTIES, classLoader)); + sources.addAll(new BuildTimeDotEnvConfigSourceProvider().getConfigSources(classLoader)); + sources.add(new BuildTimeEnvConfigSource()); + sources.add(new BuildTimeSysPropConfigSource()); builder.withSources(sources); } if (addDiscovered) { @@ -122,86 +140,57 @@ public static void addSourceProviders(SmallRyeConfigBuilder builder, Collection< } } - static class EnvConfigSource implements ConfigSource, Serializable { - private static final String NULL_SENTINEL = new String(""); //must be a new string, used for == comparison - private static final long serialVersionUID = 8786096039970882529L; - - static final Pattern REP_PATTERN = Pattern.compile("[^a-zA-Z0-9_]"); - private final Map cache = new ConcurrentHashMap<>(); //the regex match is expensive - - EnvConfigSource() { - } - - public int getOrdinal() { - return 300; - } - - public Map getProperties() { - return Collections.emptyMap(); + /** + * We override the EnvConfigSource, because we don't want the nothing back from getPropertiesNames at build time. + * The mapping is one way and there is no way to map them back. + */ + static class BuildTimeEnvConfigSource extends EnvConfigSource { + BuildTimeEnvConfigSource() { + super(); } - public String getValue(final String propertyName) { - String val = cache.get(propertyName); - if (val != null) { - if (val == NULL_SENTINEL) { - return null; - } - return val; - } - val = getRawValue(REP_PATTERN.matcher(propertyName.toUpperCase(Locale.ROOT)).replaceAll("_")); - cache.put(propertyName, val == null ? NULL_SENTINEL : val); - return val; + BuildTimeEnvConfigSource(final Map propertyMap, final int ordinal) { + super(propertyMap, ordinal); } - String getRawValue(final String name) { - return System.getenv(name); + @Override + public Set getPropertyNames() { + return new HashSet<>(); } + @Override public String getName() { return "System environment"; } } - static class DotEnvConfigSource extends EnvConfigSource { - private static final long serialVersionUID = -6718168105190376482L; - - private final Map values; - - DotEnvConfigSource() { - this(Paths.get(System.getProperty("user.dir", "."), ".env")); - } - - DotEnvConfigSource(Path path) { - Map values = new HashMap<>(); - try (InputStream is = Files.newInputStream(path)) { - try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8)) { - final Properties properties = new Properties(); - properties.load(isr); - for (String name : properties.stringPropertyNames()) { - values.put(name, properties.getProperty(name)); - } - } - } catch (FileNotFoundException | NoSuchFileException ignored) { - } catch (IOException e) { - log.debug("Failed to load `.env` file", e); - } - this.values = values; - } - - public int getOrdinal() { - return 295; + /** + * Same as BuildTimeEnvConfigSource. 
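The one-way mapping referred to in BuildTimeEnvConfigSource above is the usual environment-variable rule, visible in the removed EnvConfigSource: upper-case the property name and replace every character that is not alphanumeric or '_' with '_'. A small sketch of that rule (the SmallRye EnvConfigSource that replaces the old code applies an equivalent mapping):

```java
import java.util.Locale;
import java.util.regex.Pattern;

final class EnvNameMapping { // illustrative helper, not part of the patch

    private static final Pattern REP_PATTERN = Pattern.compile("[^a-zA-Z0-9_]");

    static String toEnvName(String propertyName) {
        return REP_PATTERN.matcher(propertyName.toUpperCase(Locale.ROOT)).replaceAll("_");
    }

    public static void main(String[] args) {
        System.out.println(toEnvName("quarkus.http.port")); // QUARKUS_HTTP_PORT
    }
}
```

Because the transformation cannot be reversed, the build-time variant returns an empty set from getPropertyNames.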
+ */ + static class BuildTimeDotEnvConfigSourceProvider extends DotEnvConfigSourceProvider { + public BuildTimeDotEnvConfigSourceProvider() { + super(); } - String getRawValue(final String name) { - return values.get(name); + public BuildTimeDotEnvConfigSourceProvider(final String location) { + super(location); } - public String getName() { - return ".env"; + @Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return new BuildTimeEnvConfigSource(ConfigSourceUtil.urlToMap(url), ordinal) { + @Override + public String getName() { + return super.getName() + "[source=" + url + "]"; + } + }; } } - static final class SysPropConfigSource implements ConfigSource { + /** + * We only want to include properties in the quarkus namespace. + */ + static class BuildTimeSysPropConfigSource extends SysPropConfigSource { public Map getProperties() { Map output = new TreeMap<>(); for (Map.Entry entry : System.getProperties().entrySet()) { @@ -213,16 +202,8 @@ public Map getProperties() { return output; } - public String getValue(final String propertyName) { - return System.getProperty(propertyName); - } - public String getName() { return "System properties"; } - - public int getOrdinal() { - return 400; - } } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationRuntimeConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationRuntimeConfig.java new file mode 100644 index 0000000000000..6f31791effa8c --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigurationRuntimeConfig.java @@ -0,0 +1,29 @@ +package io.quarkus.runtime.configuration; + +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot(name = "configuration", phase = ConfigPhase.RUN_TIME) +public class ConfigurationRuntimeConfig { + + /** + * What should happen if the application is started with a different build time configuration than it was compiled + * against. This may be useful to prevent misconfiguration. + *

+ * If this is set to {@code warn} the application will warn at start up. + *

+ * If this is set to {@code fail} the application will fail at start up. + *

+ * Native tests leveraging@io.quarkus.test.junit.TestProfile are always run with + * {@code quarkus.configuration.build-time-mismatch-at-runtime = fail}. + */ + @ConfigItem(defaultValue = "warn") + public BuildTimeMismatchAtRuntime buildTimeMismatchAtRuntime; + + public enum BuildTimeMismatchAtRuntime { + warn, + fail + } + +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/DurationConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/DurationConverter.java index 8c04d55c5e4aa..08ea4b6b8553a 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/DurationConverter.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/DurationConverter.java @@ -29,11 +29,23 @@ public DurationConverter() { * If the value consists only of a number, it implicitly treats the value as seconds. * Otherwise, tries to convert the value assuming that it is in the accepted ISO-8601 duration format. * - * @param value + * @param value duration as String * @return {@link Duration} */ @Override public Duration convert(String value) { + return parseDuration(value); + } + + /** + * Converts a value which start with a number by implicitly appending `PT` to it. + * If the value consists only of a number, it implicitly treats the value as seconds. + * Otherwise, tries to convert the value assuming that it is in the accepted ISO-8601 duration format. + * + * @param value duration as String + * @return {@link Duration} + */ + public static Duration parseDuration(String value) { value = value.trim(); if (value.isEmpty()) { return null; @@ -51,6 +63,5 @@ public Duration convert(String value) { } catch (DateTimeParseException e) { throw new IllegalArgumentException(e); } - } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/LocaleConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/LocaleConverter.java index 304668aa737d2..cab0ff13948ff 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/LocaleConverter.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/LocaleConverter.java @@ -30,7 +30,7 @@ public Locale convert(final String value) { } Locale locale = Locale.forLanguageTag(NORMALIZE_LOCALE_PATTERN.matcher(localeValue).replaceAll("-")); - if (locale.getLanguage() == null || locale.getLanguage().isEmpty()) { + if (locale != Locale.ROOT && (locale.getLanguage() == null || locale.getLanguage().isEmpty())) { throw new IllegalArgumentException("Unable to resolve locale: " + value); } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ProfileManager.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ProfileManager.java index ef365615603d7..f96509ee0088e 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ProfileManager.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ProfileManager.java @@ -37,6 +37,7 @@ public static void setRuntimeDefaultProfile(final String profile) { runtimeDefaultProfile = profile; } + //NOTE: changes made here must be replicated in BootstrapProfileManager public static String getActiveProfile() { if (launchMode == LaunchMode.TEST) { String profile = System.getProperty(QUARKUS_TEST_PROFILE_PROP); diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java index 539c1a4bbf373..0ea4c1503e66a 100644 --- 
a/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java @@ -28,8 +28,11 @@ public SmallRyeConfig getConfigFor(final SmallRyeConfigProviderResolver configPr //the HTTP port or logging info return configProviderResolver.getBuilder().forClassLoader(classLoader) .addDefaultSources() + .addDefaultInterceptors() .addDiscoveredSources() .addDiscoveredConverters() + .addDiscoveredInterceptors() + .withProfile(ProfileManager.getActiveProfile()) .build(); } return config; diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java index 8754453787476..3a35bda308015 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java @@ -12,6 +12,8 @@ import com.oracle.svm.core.threadlocal.FastThreadLocalFactory; import com.oracle.svm.core.threadlocal.FastThreadLocalInt; +import io.smallrye.common.constraint.Assert; +import io.smallrye.config.ConfigMappingMetadata; import io.smallrye.config.Expressions; /** @@ -52,4 +54,44 @@ public static T withoutExpansion(Supplier supplier) { } } } + + @TargetClass(className = "io.smallrye.config.ConfigMappingLoader") + static final class Target_ConfigMappingLoader { + @Substitute + static Class loadClass(final Class parent, final ConfigMappingMetadata configMappingMetadata) { + try { + return parent.getClassLoader().loadClass(configMappingMetadata.getClassName()); + } catch (ClassNotFoundException e) { + return null; + } + } + + @Substitute + private static Class defineClass(final Class parent, final String className, final byte[] classBytes) { + return null; + } + } + + @TargetClass(className = "io.smallrye.config.ConfigMappingInterface") + static final class Target_ConfigMappingInterface { + @Alias + static ClassValue cv = null; + + // ClassValue is substituted by a regular ConcurrentHashMap - java.lang.ClassValue.get(JavaLangSubstitutions.java:514) + @Substitute + public static Target_ConfigMappingInterface getConfigurationInterface(Class interfaceType) { + Assert.checkNotNullParam("interfaceType", interfaceType); + try { + return cv.get(interfaceType); + } catch (NullPointerException e) { + return null; + } + } + + // This should not be called, but we substitute it anyway to make sure we remove any references to ASM classes. 
+ @Substitute + public byte[] getClassBytes() { + return null; + } + } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/TrimmedStringConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/TrimmedStringConverter.java new file mode 100644 index 0000000000000..4588db6b50e8f --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/TrimmedStringConverter.java @@ -0,0 +1,17 @@ +package io.quarkus.runtime.configuration; + +import org.eclipse.microprofile.config.spi.Converter; + +public class TrimmedStringConverter implements Converter { + + public TrimmedStringConverter() { + } + + @Override + public String convert(String s) { + if (s == null) { + return s; + } + return s.trim(); + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ZoneIdConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ZoneIdConverter.java new file mode 100644 index 0000000000000..9303eefb66409 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ZoneIdConverter.java @@ -0,0 +1,24 @@ +package io.quarkus.runtime.configuration; + +import static io.quarkus.runtime.configuration.ConverterSupport.DEFAULT_QUARKUS_CONVERTER_PRIORITY; + +import java.io.Serializable; +import java.time.ZoneId; + +import javax.annotation.Priority; + +import org.eclipse.microprofile.config.spi.Converter; + +/** + * A converter to support ZoneId. + */ +@Priority(DEFAULT_QUARKUS_CONVERTER_PRIORITY) +public class ZoneIdConverter implements Converter, Serializable { + + private static final long serialVersionUID = -439010527617997936L; + + @Override + public ZoneId convert(final String value) { + return ZoneId.of(value); + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/ClassDefinerSubstitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/ClassDefinerSubstitutions.java new file mode 100644 index 0000000000000..3760352943b86 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/ClassDefinerSubstitutions.java @@ -0,0 +1,18 @@ +package io.quarkus.runtime.graal; + +import java.lang.invoke.MethodHandles; + +import com.oracle.svm.core.annotate.Delete; +import com.oracle.svm.core.annotate.TargetClass; + +import io.smallrye.common.classloader.ClassDefiner; + +final class ClassDefinerSubstitutions { + @TargetClass(ClassDefiner.class) + static final class Target_io_smallrye_common_classloader_ClassDefiner { + @Delete + public static Class defineClass(MethodHandles.Lookup lookup, Class parent, String className, byte[] classBytes) { + return null; + } + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/JavaIOSubstitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/JavaIOSubstitutions.java index 1b7e66746f6e4..fa6629ebf663e 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/graal/JavaIOSubstitutions.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/JavaIOSubstitutions.java @@ -1,11 +1,16 @@ package io.quarkus.runtime.graal; import java.io.ObjectStreamClass; +import java.util.function.BooleanSupplier; + +import org.graalvm.home.Version; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; -@TargetClass(java.io.ObjectStreamClass.class) +import io.quarkus.runtime.graal.Target_java_io_ObjectStreamClass.GraalVM20OrEarlier; + +@TargetClass(value = java.io.ObjectStreamClass.class, onlyWith = GraalVM20OrEarlier.class) @SuppressWarnings({ "unused" }) final class 
Target_java_io_ObjectStreamClass { @@ -22,4 +27,11 @@ private Target_java_io_ObjectStreamClass() { throw new UnsupportedOperationException("Not supported"); } + static class GraalVM20OrEarlier implements BooleanSupplier { + + @Override + public boolean getAsBoolean() { + return Version.getCurrent().compareTo(21) < 0; + } + } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/LoggingSubstitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/LoggingSubstitutions.java index e97cbce756a57..2b1967c2b7a69 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/graal/LoggingSubstitutions.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/LoggingSubstitutions.java @@ -3,7 +3,6 @@ import java.util.logging.Handler; import org.jboss.logmanager.LogContext; -import org.jboss.logmanager.handlers.DelayedHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,6 +12,7 @@ import com.oracle.svm.core.annotate.TargetClass; import io.quarkus.bootstrap.logging.InitialConfigurator; +import io.quarkus.bootstrap.logging.QuarkusDelayedHandler; /** */ @@ -38,7 +38,7 @@ public static Logger getLogger(Class clazz) { final class Target_io_quarkus_runtime_logging_InitialConfigurator { @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.FromAlias) @Alias - public static DelayedHandler DELAYED_HANDLER = new DelayedHandler(); + public static QuarkusDelayedHandler DELAYED_HANDLER = new QuarkusDelayedHandler(); } @TargetClass(java.util.logging.Logger.class) diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/QuarkusSubstitution.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/QuarkusSubstitution.java index 1cf9e6b8e1062..4663176e2367a 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/graal/QuarkusSubstitution.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/QuarkusSubstitution.java @@ -1,7 +1,5 @@ package io.quarkus.runtime.graal; -import java.util.function.Consumer; - import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; @@ -12,8 +10,7 @@ final class QuarkusSubstitution { @Substitute - private static void launchFromIDE(Class quarkusApplication, Consumer exitHandler, - String... args) { + private static void launchFromIDE(Class quarkusApplication, String... args) { throw new RuntimeException("Should never be hit"); } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryBuildTimeConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryBuildTimeConfig.java new file mode 100644 index 0000000000000..f6345be593c4b --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryBuildTimeConfig.java @@ -0,0 +1,20 @@ +package io.quarkus.runtime.logging; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class CategoryBuildTimeConfig { + /** + * The minimum log level for this category. + * By default all categories are configured with DEBUG minimum level. + * + * To get runtime logging below DEBUG, e.g. TRACE, + * the minimum level has to be adjusted at build time, the right log level needs to be provided at runtime. + * + * As an example, to get TRACE logging, + * minimum level needs to be at TRACE and the runtime log level needs to match that. 
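A concrete illustration of the interplay described above, with property names assumed from the "log" config root and the category map in this patch: TRACE output for a category only appears when both the build-time minimum level and the runtime level allow it.

```java
// Assumed configuration (application.properties):
//   quarkus.log.category."org.acme".min-level=TRACE   (build time, LogBuildTimeConfig)
//   quarkus.log.category."org.acme".level=TRACE       (runtime, CategoryConfig)
import org.jboss.logging.Logger;

public class TraceExample { // illustrative class

    private static final Logger LOG = Logger.getLogger("org.acme");

    void doWork() {
        // Emitted only if both settings above allow TRACE; the build-time minimum
        // level caps what can ever be enabled at runtime.
        LOG.tracef("entering doWork at %d", System.nanoTime());
    }
}
```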
+ */ + @ConfigItem(defaultValue = "inherit") + public InheritableLevel minLevel; +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryConfig.java index 9d5115d1c32bc..cb25fa55b14dc 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/CategoryConfig.java @@ -10,7 +10,10 @@ public class CategoryConfig { /** - * The log level level for this category + * The log level for this category. + * + * Note that to get log levels below INFO, + * the minimum level build time configuration option needs to be adjusted as well. */ @ConfigItem(defaultValue = "inherit") InheritableLevel level; diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/CleanupFilterConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/CleanupFilterConfig.java index afb95ee8a4018..ad56bdf5320a6 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/CleanupFilterConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/CleanupFilterConfig.java @@ -12,11 +12,11 @@ public class CleanupFilterConfig { * The message starts to match */ @ConfigItem(defaultValue = "inherit") - List ifStartsWith; + public List ifStartsWith; /** * The new log level for the filtered message, defaults to DEBUG */ @ConfigItem(defaultValue = "DEBUG") - Level targetLevel; + public Level targetLevel; } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LevelConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LevelConverter.java index 8669ff5e50a86..153b810a169e4 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/LevelConverter.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LevelConverter.java @@ -3,6 +3,7 @@ import static io.quarkus.runtime.configuration.ConverterSupport.DEFAULT_QUARKUS_CONVERTER_PRIORITY; import java.io.Serializable; +import java.util.Locale; import java.util.logging.Level; import javax.annotation.Priority; @@ -22,6 +23,6 @@ public Level convert(final String value) { if (value == null || value.isEmpty()) { return null; } - return LogContext.getLogContext().getLevelForName(value); + return LogContext.getLogContext().getLevelForName(value.toUpperCase(Locale.ROOT)); } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LogBuildTimeConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogBuildTimeConfig.java new file mode 100644 index 0000000000000..3fe8d6f0516d0 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogBuildTimeConfig.java @@ -0,0 +1,36 @@ +package io.quarkus.runtime.logging; + +import java.util.Map; +import java.util.logging.Level; + +import io.quarkus.runtime.annotations.ConfigDocSection; +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot(name = "log", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public class LogBuildTimeConfig { + + /** + * Whether or not logging metrics are published in case a metrics extension is present. + */ + @ConfigItem(name = "metrics.enabled", defaultValue = "false") + public boolean metricsEnabled; + + /** + * The default minimum log level. + */ + @ConfigItem(defaultValue = "DEBUG") + public Level minLevel; + + /** + * Minimum logging categories. + *

+ * Logging is done on a per-category basis. Each category can be independently configured. + * A configuration which applies to a category will also apply to all sub-categories of that category, + * unless there is a more specific matching sub-category configuration. + */ + @ConfigItem(name = "category") + @ConfigDocSection + public Map categories; +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LogConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogConfig.java index a32aa18da40c4..a03c7eacb375f 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/LogConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogConfig.java @@ -33,15 +33,6 @@ public final class LogConfig { @ConfigItem(defaultValue = "INFO") public Level level; - /** - * The default minimum log level - * - * @deprecated this functionality was never implemented, it may be deleted or implemented in a future release. - */ - @ConfigItem(defaultValue = "INFO") - @Deprecated - public Level minLevel; - /** * Console logging. *

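Taken together, the new CategoryBuildTimeConfig.minLevel acts as a build-time floor while CategoryConfig.level remains a runtime knob, and both have to be lowered before anything below DEBUG is actually emitted. A minimal application.properties sketch of that pairing (the property names assume the usual kebab-case mapping of the @ConfigRoot name "log" and the fields shown above; "org.acme.myservice" is just a placeholder category):

```properties
# Build time: lower the compiled-in minimum so TRACE records can exist at all for this category
quarkus.log.category."org.acme.myservice".min-level=TRACE

# Runtime: the effective level must be lowered as well, otherwise the category keeps its inherited level
quarkus.log.category."org.acme.myservice".level=TRACE

# Optional: publish the log counters introduced below (LogBuildTimeConfig.metricsEnabled)
quarkus.log.metrics.enabled=true
```

If the runtime level ends up below the build-time minimum, the updated LoggingSetupRecorder logs a warning and promotes the category to the minimum instead of silently dropping records.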
diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LogMetricsHandler.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogMetricsHandler.java new file mode 100644 index 0000000000000..f2cec8f96c236 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogMetricsHandler.java @@ -0,0 +1,47 @@ +package io.quarkus.runtime.logging; + +import java.util.Map.Entry; +import java.util.NavigableMap; +import java.util.concurrent.atomic.LongAdder; +import java.util.logging.Handler; +import java.util.logging.LogRecord; + +import org.jboss.logmanager.Level; + +/** + * Measures the number of log messages based on logger configurations quarkus.log.level and quarkus.log.category.*.level + *

+ * The counts should reflect the output of the most verbose handler, since best practice is to align that handler's level with the root + * level. + *

+ * Non-standard levels are counted with the lower standard level. + */ +public class LogMetricsHandler extends Handler { + + final NavigableMap logCounters; + + public LogMetricsHandler(NavigableMap logCounters) { + this.logCounters = logCounters; + } + + @Override + public void publish(LogRecord record) { + if (isLoggable(record)) { + Entry counter = logCounters.floorEntry(record.getLevel().intValue()); + if (counter != null) { + counter.getValue().increment(); + } else { + // Default to TRACE for anything lower + logCounters.get(Level.TRACE.intValue()).increment(); + } + } + } + + @Override + public void flush() { + } + + @Override + public void close() throws SecurityException { + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LogMetricsHandlerRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogMetricsHandlerRecorder.java new file mode 100644 index 0000000000000..b605e9fcd6fe7 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LogMetricsHandlerRecorder.java @@ -0,0 +1,53 @@ +package io.quarkus.runtime.logging; + +import java.util.Arrays; +import java.util.List; +import java.util.NavigableMap; +import java.util.Optional; +import java.util.TreeMap; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.Consumer; +import java.util.logging.Handler; + +import org.jboss.logmanager.Level; + +import io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.runtime.metrics.MetricsFactory; + +@Recorder +public class LogMetricsHandlerRecorder { + + static final String METRIC_NAME = "log.total"; + + static final String METRIC_DESCRIPTION = "Number of log events, per log level. Non-standard levels are counted with the lower standard level."; + + static final List STANDARD_LEVELS = Arrays.asList(Level.FATAL, Level.ERROR, Level.WARN, Level.INFO, Level.DEBUG, + Level.TRACE); + + static final NavigableMap COUNTERS = new TreeMap<>(); + + public void initCounters() { + for (Level level : STANDARD_LEVELS) { + LongAdder counter = new LongAdder(); + // Use integer value to match any non-standard equivalent level + COUNTERS.put(level.intValue(), counter); + } + } + + public Consumer registerMetrics() { + return new Consumer() { + @Override + public void accept(MetricsFactory metricsFactory) { + for (Level level : STANDARD_LEVELS) { + metricsFactory.builder(METRIC_NAME).description(METRIC_DESCRIPTION).tag("level", level.getName()) + .buildCounter(COUNTERS.get(level.intValue())::sum); + } + } + }; + } + + public RuntimeValue> getLogHandler() { + return new RuntimeValue(Optional.of(new LogMetricsHandler(COUNTERS))); + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java index 31c8ad6175bf8..9715b77d878af 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java @@ -12,6 +12,7 @@ import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Optional; import java.util.function.Supplier; import java.util.logging.ErrorManager; @@ -46,7 +47,9 @@ @Recorder public class LoggingSetupRecorder { - private static final boolean IS_WINDOWS = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("win"); + private static final org.jboss.logging.Logger log = 
org.jboss.logging.Logger.getLogger(LoggingSetupRecorder.class); + + private static final boolean IS_WINDOWS = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); /** * ConEmu ANSI X3.64 support enabled, @@ -75,11 +78,14 @@ public LoggingSetupRecorder() { public static void handleFailedStart() { LogConfig config = new LogConfig(); ConfigInstantiator.handleObject(config); - new LoggingSetupRecorder().initializeLogging(config, Collections.emptyList(), Collections.emptyList(), + LogBuildTimeConfig buildConfig = new LogBuildTimeConfig(); + ConfigInstantiator.handleObject(buildConfig); + new LoggingSetupRecorder().initializeLogging(config, buildConfig, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), null); } - public void initializeLogging(LogConfig config, final List>> additionalHandlers, + public void initializeLogging(LogConfig config, LogBuildTimeConfig buildConfig, + final List>> additionalHandlers, final List>> additionalNamedHandlers, final List>> possibleFormatters, final RuntimeValue>> possibleBannerSupplier) { @@ -130,6 +136,21 @@ public void initializeLogging(LogConfig config, final List entry : categories.entrySet()) { + final CategoryBuildTimeConfig buildCategory = isSubsetOf(entry.getKey(), buildConfig.categories); + final Level logLevel = getLogLevel(entry.getKey(), entry.getValue(), categories, buildConfig.minLevel); + final Level minLogLevel = buildCategory == null + ? buildConfig.minLevel + : buildCategory.minLevel.getLevel(); + + if (logLevel.intValue() < minLogLevel.intValue()) { + log.warnf("Log level %s for category '%s' set below minimum logging level %s, promoting it to %s", logLevel, + entry.getKey(), minLogLevel, minLogLevel); + + entry.getValue().level = InheritableLevel.of(minLogLevel.toString()); + } + } + for (Map.Entry entry : categories.entrySet()) { final String name = entry.getKey(); final Logger categoryLogger = logContext.getLogger(name); @@ -157,6 +178,89 @@ public void initializeLogging(LogConfig config, final List categories = config.categories; + final LogContext logContext = LogContext.getLogContext(); + final Logger rootLogger = logContext.getLogger(""); + + rootLogger.setLevel(config.level); + + ErrorManager errorManager = new OnlyOnceErrorManager(); + final Map filters = config.filters; + List filterElements = new ArrayList<>(filters.size()); + for (Entry entry : filters.entrySet()) { + filterElements.add( + new LogCleanupFilterElement(entry.getKey(), entry.getValue().targetLevel, entry.getValue().ifStartsWith)); + } + + final ArrayList handlers = new ArrayList<>(3); + + if (config.console.enable) { + final Handler consoleHandler = configureConsoleHandler(config.console, errorManager, filterElements, + Collections.emptyList(), new RuntimeValue<>(Optional.empty())); + errorManager = consoleHandler.getErrorManager(); + handlers.add(consoleHandler); + } + + Map namedHandlers = createNamedHandlers(config, Collections.emptyList(), errorManager, filterElements); + + for (Map.Entry entry : categories.entrySet()) { + final CategoryBuildTimeConfig buildCategory = isSubsetOf(entry.getKey(), buildConfig.categories); + final Level logLevel = getLogLevel(entry.getKey(), entry.getValue(), categories, buildConfig.minLevel); + final Level minLogLevel = buildCategory == null + ? 
buildConfig.minLevel + : buildCategory.minLevel.getLevel(); + + if (logLevel.intValue() < minLogLevel.intValue()) { + log.warnf("Log level %s for category '%s' set below minimum logging level %s, promoting it to %s", logLevel, + entry.getKey(), minLogLevel, minLogLevel); + + entry.getValue().level = InheritableLevel.of(minLogLevel.toString()); + } + } + + for (Map.Entry entry : categories.entrySet()) { + final String name = entry.getKey(); + final Logger categoryLogger = logContext.getLogger(name); + final CategoryConfig categoryConfig = entry.getValue(); + if (!categoryConfig.level.isInherited()) { + categoryLogger.setLevel(categoryConfig.level.getLevel()); + } + categoryLogger.setUseParentHandlers(categoryConfig.useParentHandlers); + if (categoryConfig.handlers.isPresent()) { + addNamedHandlersToCategory(categoryConfig, namedHandlers, categoryLogger, errorManager); + } + } + InitialConfigurator.DELAYED_HANDLER.setAutoFlush(false); + InitialConfigurator.DELAYED_HANDLER.setBuildTimeHandlers(handlers.toArray(EmbeddedConfigurator.NO_HANDLERS)); + } + + private static Level getLogLevel(String categoryName, CategoryConfig categoryConfig, Map categories, + Level rootMinLevel) { + if (Objects.isNull(categoryConfig)) + return rootMinLevel; + + final InheritableLevel inheritableLevel = categoryConfig.level; + if (!inheritableLevel.isInherited()) + return inheritableLevel.getLevel(); + + int lastDotIndex = categoryName.lastIndexOf('.'); + if (lastDotIndex == -1) + return rootMinLevel; + + String parent = categoryName.substring(0, lastDotIndex); + return getLogLevel(parent, categories.get(parent), categories, rootMinLevel); + } + + private static CategoryBuildTimeConfig isSubsetOf(String categoryName, Map categories) { + return categories.entrySet().stream() + .filter(buildCategoryEntry -> categoryName.startsWith(buildCategoryEntry.getKey())) + .map(Entry::getValue) + .findFirst() + .orElse(null); + } + private static Map createNamedHandlers(LogConfig config, List>> possibleFormatters, ErrorManager errorManager, List filterElements) { @@ -185,14 +289,27 @@ private static void addToNamedHandlers(Map namedHandlers, Handl handlerName)); } namedHandlers.put(handlerName, handler); + InitialConfigurator.DELAYED_HANDLER.addLoggingCloseTask(new Runnable() { + @Override + public void run() { + handler.close(); + } + }); } - private void addNamedHandlersToCategory(CategoryConfig categoryConfig, Map namedHandlers, + private static void addNamedHandlersToCategory(CategoryConfig categoryConfig, Map namedHandlers, Logger categoryLogger, ErrorManager errorManager) { for (String categoryNamedHandler : categoryConfig.handlers.get()) { - if (namedHandlers.get(categoryNamedHandler) != null) { - categoryLogger.addHandler(namedHandlers.get(categoryNamedHandler)); + Handler handler = namedHandlers.get(categoryNamedHandler); + if (handler != null) { + categoryLogger.addHandler(handler); + InitialConfigurator.DELAYED_HANDLER.addLoggingCloseTask(new Runnable() { + @Override + public void run() { + categoryLogger.removeHandler(handler); + } + }); } else { errorManager.error(String.format("Handler with name '%s' is linked to a category but not configured.", categoryNamedHandler), null, ErrorManager.GENERIC_FAILURE); diff --git a/core/runtime/src/main/java/io/quarkus/runtime/metrics/MetricsFactory.java b/core/runtime/src/main/java/io/quarkus/runtime/metrics/MetricsFactory.java new file mode 100644 index 0000000000000..7a7d601472f1a --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/metrics/MetricsFactory.java @@ 
-0,0 +1,161 @@ +package io.quarkus.runtime.metrics; + +import java.time.Duration; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.function.Supplier; + +/** + * Extensions can create or register metrics using this factory + * independent of the enabled metrics provider + */ +public interface MetricsFactory { + + /** A well-known string for MicroProfile metrics provided by the SmallRye Metrics quarkus extension */ + final String MP_METRICS = "smallrye-metrics"; + + /** A well-known string for Micrometer metrics provided by the Micrometer Metrics quarkus extension */ + final String MICROMETER = "micrometer"; + + /** Registry type or scope. This may not be used by all metrics extensions. */ + public static enum Type { + APPLICATION, + BASE, + VENDOR; + } + + /** + * @return true if this factory supports the named metrics system. Arbitrary + * strings are allowed. Constants are present for a few. + * @see #MICROMETER + * @see #MP_METRICS + */ + boolean metricsSystemSupported(String name); + + /** + * @param name The name of the metric (required) + * @return a fluid builder for registering metrics (default VENDOR type). + * @see Type + */ + default MetricBuilder builder(String name) { + return builder(name, Type.VENDOR); + }; + + /** + * @param name The name of the metric (required) + * @param type The scope or type of the metric (optional, may not be used) + * @return a fluid builder for registering metrics. + * @see Type + */ + MetricBuilder builder(String name, Type type); + + interface MetricBuilder { + /** + * @param description Description text of the eventual metric (optional). + * @return The builder with added description. + */ + MetricBuilder description(String description); + + /** + * @param key The tag key. + * @param value The tag value. + * @return The builder with added tag. + */ + MetricBuilder tag(String key, String value); + + /** + * Specify the metric unit (optional) + * + * @param unit Base unit of the eventual metric + * @return The builder with added base unit. + */ + MetricBuilder unit(String unit); + + /** + * Register a counter that retrieves its value from a supplier function + * + * @param countFunction Function supplying a monotonically increasing number value + */ + void buildCounter(Supplier countFunction); + + /** + * Register a counter that retrieves its value by the applying a function + * to an object + * + * @param obj Object instance to observe + * @param countFunction Function returning a monotonically increasing value + */ + void buildCounter(T obj, Function countFunction); + + /** + * Register a gauge that retrieves its value from a supplier function + * + * @param gaugeFunction Function supplying number value + */ + void buildGauge(Supplier gaugeFunction); + + /** + * Register a gauge that retrieves its value by applying a function + * to an object + * + * @param obj Object instance to observe + * @param gaugeFunction Function returning a number value + */ + void buildGauge(T obj, Function gaugeFunction); + + /** + * @return TimeRecorder to measure passage of time using + * incremental updates. + */ + TimeRecorder buildTimer(); + + /** + * Wrap a {@link Runnable} so that it is timed when invoked. + * + * @param f The Runnable to time when it is invoked. + * @return The wrapped Runnable. + */ + Runnable buildTimer(Runnable f); + + /** + * Wrap a {@link Callable} so that it is timed when invoked. + * + * @param f The Callable to time when it is invoked. 
+ * @param The return type of the callable. + * @return The wrapped callable. + */ + Callable buildTimer(Callable f); + + /** + * Wrap a {@link Supplier} so that it is timed when invoked. + * + * @param f The {@code Supplier} to time when it is invoked. + * @param The return type of the {@code Supplier} result. + * @return The wrapped supplier. + */ + Supplier buildTimer(Supplier f); + } + + /** + * A time recorder that tracks elapsed time using incremental updates + * using a duration with a specified time unit. + */ + interface TimeRecorder { + /** + * @param amount Duration of a single event being measured by this timer. If the amount is less than 0 + * the value will be dropped. + * @param unit Time unit for the amount being recorded. + */ + void update(long amount, TimeUnit unit); + + /** + * Updates the statistics kept by the recorder with the specified amount. + * + * @param duration Duration of a single event being measured by this timer. + */ + default void update(Duration duration) { + update(duration.toNanos(), TimeUnit.NANOSECONDS); + } + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/recording/substitutions/ZoneIdSubstitution.java b/core/runtime/src/main/java/io/quarkus/runtime/recording/substitutions/ZoneIdSubstitution.java new file mode 100644 index 0000000000000..9e9f28cfe2875 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/recording/substitutions/ZoneIdSubstitution.java @@ -0,0 +1,18 @@ +package io.quarkus.runtime.recording.substitutions; + +import java.time.ZoneId; + +import io.quarkus.runtime.ObjectSubstitution; + +public class ZoneIdSubstitution implements ObjectSubstitution { + + @Override + public String serialize(ZoneId obj) { + return obj.getId(); + } + + @Override + public ZoneId deserialize(String str) { + return ZoneId.of(str); + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/test/TestApplicationClassPredicate.java b/core/runtime/src/main/java/io/quarkus/runtime/test/TestApplicationClassPredicate.java new file mode 100644 index 0000000000000..26be06a319006 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/test/TestApplicationClassPredicate.java @@ -0,0 +1,10 @@ +package io.quarkus.runtime.test; + +import java.util.function.Predicate; + +/** + * This predicate can be used to distinguish application classes in the test mode. 
+ */ +public interface TestApplicationClassPredicate extends Predicate { + +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java b/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java new file mode 100644 index 0000000000000..e6c1bd6d07be9 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java @@ -0,0 +1,24 @@ +package io.quarkus.runtime.test; + +import java.util.ArrayList; +import java.util.List; +import java.util.ServiceLoader; +import java.util.function.Function; + +/** + * Interface that can be used to integrate with the TestHTTPEndpoint infrastructure + */ +public interface TestHttpEndpointProvider { + + Function, String> endpointProvider(); + + static List, String>> load() { + List, String>> ret = new ArrayList<>(); + for (TestHttpEndpointProvider i : ServiceLoader.load(TestHttpEndpointProvider.class, + Thread.currentThread().getContextClassLoader())) { + ret.add(i.endpointProvider()); + } + return ret; + } + +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/util/ExceptionUtil.java b/core/runtime/src/main/java/io/quarkus/runtime/util/ExceptionUtil.java index 9850d434e3ad6..75185dcf478fb 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/util/ExceptionUtil.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/util/ExceptionUtil.java @@ -76,6 +76,16 @@ public static String rootCauseFirstStackTrace(final Throwable exception) { return generateStackTrace(modifiedRoot).replace("Caused by:", "Resulted in:"); } + public static Throwable getRootCause(Throwable exception) { + final List chain = new ArrayList<>(); + Throwable curr = exception; + while (curr != null && !chain.contains(curr)) { + chain.add(curr); + curr = curr.getCause(); + } + return chain.isEmpty() ? null : chain.get(chain.size() - 1); + } + /** * Creates and returns a new {@link Throwable} which has the following characteristics: *

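The getRootCause helper added above walks the cause chain, guards against cycles by stopping when a throwable repeats, and returns the deepest cause (or null for a null input). A small usage sketch; the nested exceptions here are purely illustrative:

```java
import io.quarkus.runtime.util.ExceptionUtil;

public class RootCauseExample {

    public static void main(String[] args) {
        // Hypothetical nesting: a wrapper exception around a wrapper around the real culprit
        Throwable failure = new RuntimeException("Deployment failed",
                new IllegalStateException("Configuration invalid",
                        new NumberFormatException("For input string: \"abc\"")));

        // Resolves to the innermost NumberFormatException
        Throwable root = ExceptionUtil.getRootCause(failure);
        System.out.println(root.getClass().getName() + ": " + root.getMessage());
    }
}
```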
    diff --git a/core/runtime/src/main/java/io/quarkus/runtime/util/JavaVersionUtil.java b/core/runtime/src/main/java/io/quarkus/runtime/util/JavaVersionUtil.java new file mode 100644 index 0000000000000..74d7d0c31ea06 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/util/JavaVersionUtil.java @@ -0,0 +1,37 @@ +package io.quarkus.runtime.util; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class JavaVersionUtil { + + private static final Pattern PATTERN = Pattern.compile("(?:1\\.)?(\\d+)"); + + private static boolean IS_JAVA_11_OR_NEWER; + private static boolean IS_JAVA_13_OR_NEWER; + + static { + performChecks(); + } + + // visible for testing + static void performChecks() { + Matcher matcher = PATTERN.matcher(System.getProperty("java.specification.version", "")); + if (matcher.matches()) { + int first = Integer.parseInt(matcher.group(1)); + IS_JAVA_11_OR_NEWER = (first >= 11); + IS_JAVA_13_OR_NEWER = (first >= 13); + } else { + IS_JAVA_11_OR_NEWER = false; + IS_JAVA_13_OR_NEWER = false; + } + } + + public static boolean isJava11OrHigher() { + return IS_JAVA_11_OR_NEWER; + } + + public static boolean isJava13OrHigher() { + return IS_JAVA_13_OR_NEWER; + } +} diff --git a/core/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/core/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 6e2b8f0a6e461..e3d0c8f1f88be 100644 --- a/core/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/core/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -1,3 +1,4 @@ --- name: "Quarkus - Core" -metadata: {} +metadata: + unlisted: true diff --git a/core/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.Converter b/core/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.Converter index 9c6a04c4ca0b8..7a9cef122ded4 100644 --- a/core/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.Converter +++ b/core/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.Converter @@ -7,4 +7,5 @@ io.quarkus.runtime.configuration.PathConverter io.quarkus.runtime.configuration.DurationConverter io.quarkus.runtime.configuration.MemorySizeConverter io.quarkus.runtime.configuration.LocaleConverter +io.quarkus.runtime.configuration.ZoneIdConverter io.quarkus.runtime.logging.LevelConverter diff --git a/core/runtime/src/test/java/io/quarkus/runtime/configuration/DotEnvTestCase.java b/core/runtime/src/test/java/io/quarkus/runtime/configuration/DotEnvTestCase.java index 7a95ee46003ce..e641f71699fb7 100644 --- a/core/runtime/src/test/java/io/quarkus/runtime/configuration/DotEnvTestCase.java +++ b/core/runtime/src/test/java/io/quarkus/runtime/configuration/DotEnvTestCase.java @@ -2,7 +2,7 @@ import static java.util.Collections.singletonMap; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.BufferedWriter; import java.io.IOException; @@ -61,7 +61,8 @@ private SmallRyeConfig buildConfig(Map configMap) throws IOExcep } } } - builder.withSources(new ConfigUtils.DotEnvConfigSource(dotEnv)); + builder.withSources( + new ConfigUtils.BuildTimeDotEnvConfigSourceProvider(dotEnv.toUri().toString()).getConfigSources(classLoader)); final SmallRyeConfig config = builder.build(); Files.delete(dotEnv); cpr.registerConfig(config, classLoader); @@ -83,7 +84,7 @@ public void testProperties() throws 
IOException { singletonMap("FOO_BAR", "foo.bar"), singletonMap("foo.baz", "nothing"))); assertEquals("foo.bar", config.getValue("foo.bar", String.class)); - assertFalse(config.getOptionalValue("foo.baz", String.class).isPresent()); + assertTrue(config.getOptionalValue("foo.baz", String.class).isPresent()); } } diff --git a/core/runtime/src/test/java/io/quarkus/runtime/configuration/LocaleConverterTest.java b/core/runtime/src/test/java/io/quarkus/runtime/configuration/LocaleConverterTest.java index 0c924c9d78117..87d61ffe46c17 100644 --- a/core/runtime/src/test/java/io/quarkus/runtime/configuration/LocaleConverterTest.java +++ b/core/runtime/src/test/java/io/quarkus/runtime/configuration/LocaleConverterTest.java @@ -1,6 +1,8 @@ package io.quarkus.runtime.configuration; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; import java.util.Locale; @@ -19,8 +21,23 @@ public void testStandardLocale() { @Test public void testUnderscoreLocale() { LocaleConverter localeConverter = new LocaleConverter(); - Locale locale = localeConverter.convert("fr-FR"); + Locale locale = localeConverter.convert("fr_FR"); assertEquals("fr", locale.getLanguage()); assertEquals("FR", locale.getCountry()); } + + @Test + public void testCLocale() { + LocaleConverter localeConverter = new LocaleConverter(); + Locale locale = localeConverter.convert("c.u-US"); + assertSame(Locale.ROOT, locale); + } + + @Test + public void testEmptyLocale() { + LocaleConverter localeConverter = new LocaleConverter(); + Locale locale = localeConverter.convert(""); + assertNull(locale); + } + } diff --git a/core/runtime/src/test/java/io/quarkus/runtime/logging/LogMetricsHandlerTest.java b/core/runtime/src/test/java/io/quarkus/runtime/logging/LogMetricsHandlerTest.java new file mode 100644 index 0000000000000..72022c189aac2 --- /dev/null +++ b/core/runtime/src/test/java/io/quarkus/runtime/logging/LogMetricsHandlerTest.java @@ -0,0 +1,65 @@ +package io.quarkus.runtime.logging; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.NavigableMap; +import java.util.TreeMap; +import java.util.concurrent.atomic.LongAdder; +import java.util.logging.LogRecord; + +import org.jboss.logmanager.Level; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class LogMetricsHandlerTest { + + LogMetricsHandler handler; + + NavigableMap counters; + + @BeforeEach + public void setUp() { + counters = new TreeMap<>(); + counters.put(Level.TRACE.intValue(), new LongAdder()); + counters.put(Level.DEBUG.intValue(), new LongAdder()); + counters.put(Level.WARN.intValue(), new LongAdder()); + handler = new LogMetricsHandler(counters); + } + + @Test + public void equivalentLevelsShouldBeMerged() { + handler.publish(new LogRecord(Level.DEBUG, "test")); + handler.publish(new LogRecord(Level.FINE, "test")); + + assertEquals(2, counters.get(Level.DEBUG.intValue()).sum()); + } + + @Test + public void shouldFilterRecords() { + handler.setLevel(Level.INFO); + + handler.publish(new LogRecord(Level.DEBUG, "test")); + + assertEquals(0, counters.get(Level.DEBUG.intValue()).sum()); + } + + @Test + public void nonStandardLevelShouldBeCountedWithLowerStandardLevel() { + handler.publish(new LogRecord(new CustomLevel("IMPORTANT", 950), "test")); + + assertEquals(1, counters.get(Level.WARN.intValue()).sum()); + } + + @Test + public void anythingBelowTraceShouldBeCountedAsTrace() { + 
handler.publish(new LogRecord(Level.FINEST, "test")); + + assertEquals(1, counters.get(Level.TRACE.intValue()).sum()); + } + + static class CustomLevel extends java.util.logging.Level { + CustomLevel(final String name, final int value) { + super(name, value); + } + } +} diff --git a/core/runtime/src/test/java/io/quarkus/runtime/util/ExceptionUtilTest.java b/core/runtime/src/test/java/io/quarkus/runtime/util/ExceptionUtilTest.java index f77c661e259c5..c85696e57a8f0 100644 --- a/core/runtime/src/test/java/io/quarkus/runtime/util/ExceptionUtilTest.java +++ b/core/runtime/src/test/java/io/quarkus/runtime/util/ExceptionUtilTest.java @@ -1,7 +1,9 @@ package io.quarkus.runtime.util; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOError; @@ -48,6 +50,15 @@ public void testReversed() throws Exception { assertTrue(expectedResultedIns.isEmpty(), "Reversed stacktrace is missing certain elements"); } + @Test + public void testGetRootCause() { + Throwable e = generateException(); + Throwable rootCause = ExceptionUtil.getRootCause(e); + assertEquals(NumberFormatException.class, rootCause.getClass()); + assertNull(ExceptionUtil.getRootCause(null)); + assertEquals(NullPointerException.class, ExceptionUtil.getRootCause(new NullPointerException()).getClass()); + } + private Throwable generateException() { try { try { diff --git a/core/runtime/src/test/java/io/quarkus/runtime/util/JavaVersionUtilTest.java b/core/runtime/src/test/java/io/quarkus/runtime/util/JavaVersionUtilTest.java new file mode 100644 index 0000000000000..d2300aa148369 --- /dev/null +++ b/core/runtime/src/test/java/io/quarkus/runtime/util/JavaVersionUtilTest.java @@ -0,0 +1,65 @@ +package io.quarkus.runtime.util; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +class JavaVersionUtilTest { + + private static final String JAVA_SPECIFICATION_VERSION = "java.specification.version"; + + @Test + void testJava8() { + testWithVersion("1.8", () -> { + assertFalse(JavaVersionUtil.isJava11OrHigher()); + assertFalse(JavaVersionUtil.isJava13OrHigher()); + }); + } + + @Test + void testJava11() { + testWithVersion("11", () -> { + assertTrue(JavaVersionUtil.isJava11OrHigher()); + assertFalse(JavaVersionUtil.isJava13OrHigher()); + }); + } + + @Test + void testJava14() { + testWithVersion("14", () -> { + assertTrue(JavaVersionUtil.isJava11OrHigher()); + assertTrue(JavaVersionUtil.isJava13OrHigher()); + }); + } + + @Test + void testJava17() { + testWithVersion("17", () -> { + assertTrue(JavaVersionUtil.isJava11OrHigher()); + assertTrue(JavaVersionUtil.isJava13OrHigher()); + }); + } + + @Test + void testJava21() { + testWithVersion("21", () -> { + assertTrue(JavaVersionUtil.isJava11OrHigher()); + assertTrue(JavaVersionUtil.isJava13OrHigher()); + }); + } + + private void testWithVersion(String javaSpecificationVersion, Runnable test) { + String previous = System.getProperty(JAVA_SPECIFICATION_VERSION); + System.setProperty(JAVA_SPECIFICATION_VERSION, javaSpecificationVersion); + JavaVersionUtil.performChecks(); + try { + test.run(); + } finally { + System.setProperty(JAVA_SPECIFICATION_VERSION, previous); + JavaVersionUtil.performChecks(); + } + + } + +} diff --git 
a/core/test-extension/deployment/pom.xml b/core/test-extension/deployment/pom.xml index 01366b995a67e..eb4318b63fe34 100644 --- a/core/test-extension/deployment/pom.xml +++ b/core/test-extension/deployment/pom.xml @@ -25,10 +25,6 @@ io.quarkus quarkus-arc-deployment - - io.quarkus - quarkus-undertow - io.quarkus quarkus-test-extension diff --git a/core/test-extension/deployment/src/main/java/io/quarkus/extest/deployment/TestProcessor.java b/core/test-extension/deployment/src/main/java/io/quarkus/extest/deployment/TestProcessor.java index 442a52bd7d13b..e93eea246e5ff 100644 --- a/core/test-extension/deployment/src/main/java/io/quarkus/extest/deployment/TestProcessor.java +++ b/core/test-extension/deployment/src/main/java/io/quarkus/extest/deployment/TestProcessor.java @@ -25,7 +25,6 @@ import java.util.function.BooleanSupplier; import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; @@ -52,6 +51,7 @@ import io.quarkus.deployment.builditem.ShutdownContextBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedPackageBuildItem; import io.quarkus.extest.runtime.FinalFieldReflectionObject; import io.quarkus.extest.runtime.IConfigConsumer; import io.quarkus.extest.runtime.RuntimeXmlConfigService; @@ -68,6 +68,7 @@ import io.quarkus.extest.runtime.config.TestRunTimeConfig; import io.quarkus.extest.runtime.config.XmlConfig; import io.quarkus.extest.runtime.logging.AdditionalLogHandlerValueFactory; +import io.quarkus.extest.runtime.runtimeinitializedpackage.RuntimeInitializedClass; import io.quarkus.extest.runtime.subst.DSAPublicKeyObjectSubstitution; import io.quarkus.extest.runtime.subst.KeyProxy; import io.quarkus.runtime.RuntimeValue; @@ -81,9 +82,6 @@ public final class TestProcessor { static DotName TEST_ANNOTATION = DotName.createSimple(TestAnnotation.class.getName()); static DotName TEST_ANNOTATION_SCOPE = DotName.createSimple(ApplicationScoped.class.getName()); - @Inject - BuildProducer resource; - TestConfigRoot configRoot; TestBuildTimeConfig buildTimeConfig; TestBuildAndRunTimeConfig buildAndRunTimeConfig; @@ -125,7 +123,7 @@ LogHandlerBuildItem registerAdditionalLogHandler(final AdditionalLogHandlerValue } @BuildStep - void registerNativeImageResources() { + void registerNativeImageResources(BuildProducer resource) { resource.produce(new NativeImageResourceBuildItem("/DSAPublicKey.encoded")); } @@ -275,14 +273,14 @@ void checkConfig() { + buildTimeConfig.btStringOptWithDefault); } if (!buildTimeConfig.allValues.oov.equals(new ObjectOfValue("configPart1", "configPart2"))) { - throw new IllegalStateException("buildTimeConfig.oov != configPart1+onfigPart2; " + buildTimeConfig.allValues.oov); + throw new IllegalStateException("buildTimeConfig.oov != configPart1+configPart2; " + buildTimeConfig.allValues.oov); } if (!buildTimeConfig.allValues.oovWithDefault.equals(new ObjectOfValue("defaultPart1", "defaultPart2"))) { throw new IllegalStateException( "buildTimeConfig.oovWithDefault != defaultPart1+defaultPart2; " + buildTimeConfig.allValues.oovWithDefault); } if (!buildTimeConfig.allValues.ovo.equals(new ObjectValueOf("configPart1", "configPart2"))) { - throw new IllegalStateException("buildTimeConfig.oov != configPart1+onfigPart2; " + buildTimeConfig.allValues.oov); + throw new 
IllegalStateException("buildTimeConfig.oov != configPart1+configPart2; " + buildTimeConfig.allValues.oov); } if (!buildTimeConfig.allValues.ovoWithDefault.equals(new ObjectValueOf("defaultPart1", "defaultPart2"))) { throw new IllegalStateException( @@ -472,6 +470,11 @@ void checkMapMap(TestBuildAndRunTimeConfig btrt, TestBuildTimeConfig bt, BuildPr } } + @BuildStep + RuntimeInitializedPackageBuildItem runtimeInitializedPackage() { + return new RuntimeInitializedPackageBuildItem(RuntimeInitializedClass.class.getPackage().getName()); + } + @BuildStep(onlyIf = Never.class) void neverRunThisOne() { throw new IllegalStateException("Not supposed to run!"); diff --git a/core/test-extension/deployment/src/main/resources/application.properties b/core/test-extension/deployment/src/main/resources/application.properties index 29b6228b320f8..ed6fa0d588e09 100644 --- a/core/test-extension/deployment/src/main/resources/application.properties +++ b/core/test-extension/deployment/src/main/resources/application.properties @@ -1,5 +1,6 @@ # Log settings -quarkus.log.level=INFO +# log level in lower case for testing +quarkus.log.level=info quarkus.log.file.enable=true quarkus.log.file.level=INFO quarkus.log.file.format=%d{HH:mm:ss} %-5p [%c{2.}]] (%t) %s%e%n diff --git a/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExceptionHandlingCommandModeTestCase.java b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExceptionHandlingCommandModeTestCase.java new file mode 100644 index 0000000000000..0c0e2e9d4cf44 --- /dev/null +++ b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExceptionHandlingCommandModeTestCase.java @@ -0,0 +1,29 @@ +package io.quarkus.commandmode; + +import org.assertj.core.api.Assertions; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusProdModeTest; + +public class ExceptionHandlingCommandModeTestCase { + + @RegisterExtension + static final QuarkusProdModeTest config = new QuarkusProdModeTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsManifestResource("application.properties", "microprofile-config.properties") + .addClasses(ThrowExceptionApplicationMain.class, ThrowExceptionApplication.class)) + .setApplicationName("exception-handling") + .setApplicationVersion("0.1-SNAPSHOT") + .setExpectExit(true) + .setRun(true); + + @Test + public void testRun() { + Assertions.assertThat(config.getStartupConsoleOutput()) + .contains("exception-handling").contains("Exception and exit code [1] handled by application"); + Assertions.assertThat(config.getExitCode()).isEqualTo(10); + } +} diff --git a/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExitCodeTestCase.java b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExitCodeTestCase.java index 3e223671654b4..5dcdf4d41d2c3 100644 --- a/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExitCodeTestCase.java +++ b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ExitCodeTestCase.java @@ -2,7 +2,7 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; -import java.util.function.Consumer; +import java.util.function.BiConsumer; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; @@ -25,12 +25,13 @@ public class ExitCodeTestCase { @Test public 
void testReturnedExitCode() throws ExecutionException, InterruptedException { CompletableFuture future = new CompletableFuture<>(); - ApplicationLifecycleManager.run(Application.currentApplication(), ExitCodeApplication.class, new Consumer() { - @Override - public void accept(Integer integer) { - future.complete(integer); - } - }, "5"); + ApplicationLifecycleManager.run(Application.currentApplication(), ExitCodeApplication.class, + new BiConsumer() { + @Override + public void accept(Integer integer, Throwable cause) { + future.complete(integer); + } + }, "5"); Assertions.assertEquals(5, future.get()); } @@ -41,9 +42,9 @@ public void testWaitToExitWithCode() throws ExecutionException, InterruptedExcep @Override public void run() { ApplicationLifecycleManager.run(Application.currentApplication(), WaitToExitApplication.class, - new Consumer() { + new BiConsumer() { @Override - public void accept(Integer integer) { + public void accept(Integer integer, Throwable cause) { future.complete(integer); } }); @@ -62,9 +63,9 @@ public void testWaitToExitWithNoCode() throws ExecutionException, InterruptedExc @Override public void run() { ApplicationLifecycleManager.run(Application.currentApplication(), WaitToExitApplication.class, - new Consumer() { + new BiConsumer() { @Override - public void accept(Integer integer) { + public void accept(Integer integer, Throwable cause) { future.complete(integer); } }); diff --git a/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ThrowExceptionApplication.java b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ThrowExceptionApplication.java new file mode 100644 index 0000000000000..fd5cb36e1e6b1 --- /dev/null +++ b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ThrowExceptionApplication.java @@ -0,0 +1,11 @@ +package io.quarkus.commandmode; + +import io.quarkus.runtime.QuarkusApplication; + +public class ThrowExceptionApplication implements QuarkusApplication { + + @Override + public int run(String... args) throws Exception { + throw new RuntimeException("Exception thrown from application"); + } +} diff --git a/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ThrowExceptionApplicationMain.java b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ThrowExceptionApplicationMain.java new file mode 100644 index 0000000000000..743aa04fd2352 --- /dev/null +++ b/core/test-extension/deployment/src/test/java/io/quarkus/commandmode/ThrowExceptionApplicationMain.java @@ -0,0 +1,20 @@ +package io.quarkus.commandmode; + +import java.util.function.BiConsumer; + +import io.quarkus.runtime.Quarkus; +import io.quarkus.runtime.annotations.QuarkusMain; + +@QuarkusMain +public class ThrowExceptionApplicationMain { + + public static void main(String... 
args) { + Quarkus.run(ThrowExceptionApplication.class, new BiConsumer() { + @Override + public void accept(Integer exitCode, Throwable cause) { + System.out.println("Exception and exit code [" + exitCode + "] handled by application"); + System.exit(10); + } + }); + } +} diff --git a/core/test-extension/deployment/src/test/java/io/quarkus/logging/CategoryConfiguredHandlerTest.java b/core/test-extension/deployment/src/test/java/io/quarkus/logging/CategoryConfiguredHandlerTest.java index b5937e805cb10..5ccd126e0e435 100644 --- a/core/test-extension/deployment/src/test/java/io/quarkus/logging/CategoryConfiguredHandlerTest.java +++ b/core/test-extension/deployment/src/test/java/io/quarkus/logging/CategoryConfiguredHandlerTest.java @@ -7,7 +7,6 @@ import org.jboss.logmanager.formatters.PatternFormatter; import org.jboss.logmanager.handlers.ConsoleHandler; -import org.jboss.logmanager.handlers.DelayedHandler; import org.jboss.logmanager.handlers.FileHandler; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; @@ -15,6 +14,7 @@ import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.bootstrap.logging.InitialConfigurator; +import io.quarkus.bootstrap.logging.QuarkusDelayedHandler; import io.quarkus.test.QuarkusUnitTest; public class CategoryConfiguredHandlerTest { @@ -30,7 +30,7 @@ public void consoleOutputTest() { LogManager logManager = LogManager.getLogManager(); assertThat(logManager).isInstanceOf(org.jboss.logmanager.LogManager.class); - DelayedHandler delayedHandler = InitialConfigurator.DELAYED_HANDLER; + QuarkusDelayedHandler delayedHandler = InitialConfigurator.DELAYED_HANDLER; assertThat(Logger.getLogger("").getHandlers()).contains(delayedHandler); Handler handler = Arrays.stream(delayedHandler.getHandlers()).filter(h -> (h instanceof ConsoleHandler)) diff --git a/core/test-extension/deployment/src/test/java/io/quarkus/logging/LoggingTestsHelper.java b/core/test-extension/deployment/src/test/java/io/quarkus/logging/LoggingTestsHelper.java index b103f2dd1652d..d684669240cea 100644 --- a/core/test-extension/deployment/src/test/java/io/quarkus/logging/LoggingTestsHelper.java +++ b/core/test-extension/deployment/src/test/java/io/quarkus/logging/LoggingTestsHelper.java @@ -9,10 +9,10 @@ import java.util.logging.LogManager; import java.util.logging.Logger; -import org.jboss.logmanager.handlers.DelayedHandler; import org.junit.jupiter.api.Assertions; import io.quarkus.bootstrap.logging.InitialConfigurator; +import io.quarkus.bootstrap.logging.QuarkusDelayedHandler; public class LoggingTestsHelper { @@ -20,7 +20,7 @@ public static Handler getHandler(Class clazz) { LogManager logManager = LogManager.getLogManager(); assertThat(logManager).isInstanceOf(org.jboss.logmanager.LogManager.class); - DelayedHandler delayedHandler = InitialConfigurator.DELAYED_HANDLER; + QuarkusDelayedHandler delayedHandler = InitialConfigurator.DELAYED_HANDLER; assertThat(Logger.getLogger("").getHandlers()).contains(delayedHandler); assertThat(delayedHandler.getLevel()).isEqualTo(Level.ALL); diff --git a/core/test-extension/pom.xml b/core/test-extension/pom.xml index 35d0c0601bd7d..f6bd01cb36e0a 100644 --- a/core/test-extension/pom.xml +++ b/core/test-extension/pom.xml @@ -6,7 +6,6 @@ io.quarkus quarkus-core-parent 999-SNAPSHOT - ../ 4.0.0 @@ -16,6 +15,7 @@ true + true @@ -25,12 +25,6 @@ - - maven-compiler-plugin - - true - - org.sonatype.plugins nexus-staging-maven-plugin diff --git a/core/test-extension/runtime/pom.xml b/core/test-extension/runtime/pom.xml 
index f4c4f8b3e899f..5766ccc7da55b 100644 --- a/core/test-extension/runtime/pom.xml +++ b/core/test-extension/runtime/pom.xml @@ -30,9 +30,14 @@ io.quarkus quarkus-arc + + io.quarkus + quarkus-undertow + org.graalvm.nativeimage svm + provided io.quarkus.http diff --git a/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/TestConfigRoot.java b/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/TestConfigRoot.java index f184f34abe72c..70b2a6b7c2386 100644 --- a/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/TestConfigRoot.java +++ b/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/TestConfigRoot.java @@ -1,7 +1,6 @@ package io.quarkus.extest.runtime.config; -import org.eclipse.microprofile.config.inject.ConfigProperty; - +import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; @@ -10,12 +9,12 @@ public class TestConfigRoot { /** * resource location of the DSAPublicKey encoded bytes */ - @ConfigProperty + @ConfigItem public String dsaKeyLocation; /** * Should the TestProcessor#checkConfig method validate the buildTimeConfig */ - @ConfigProperty(defaultValue = "false") + @ConfigItem(defaultValue = "false") public boolean validateBuildConfig; } diff --git a/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/named/TestBuildAndRunTimeConfig.java b/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/named/TestBuildAndRunTimeConfig.java new file mode 100644 index 0000000000000..b2c1f7b169eb4 --- /dev/null +++ b/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/config/named/TestBuildAndRunTimeConfig.java @@ -0,0 +1,9 @@ +package io.quarkus.extest.runtime.config.named; + +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +/* Causes a CCE: https://github.com/quarkusio/quarkus/issues/13966 */ +@ConfigRoot(name = "same-simple-name-different-root", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public class TestBuildAndRunTimeConfig { +} diff --git a/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/runtimeinitializedpackage/RuntimeInitializedClass.java b/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/runtimeinitializedpackage/RuntimeInitializedClass.java new file mode 100644 index 0000000000000..5425cca0aa0e8 --- /dev/null +++ b/core/test-extension/runtime/src/main/java/io/quarkus/extest/runtime/runtimeinitializedpackage/RuntimeInitializedClass.java @@ -0,0 +1,5 @@ +package io.quarkus.extest.runtime.runtimeinitializedpackage; + +public class RuntimeInitializedClass { + +} diff --git a/devtools/aesh/pom.xml b/devtools/aesh/pom.xml deleted file mode 100644 index 5aa72fc640fec..0000000000000 --- a/devtools/aesh/pom.xml +++ /dev/null @@ -1,222 +0,0 @@ - - - - quarkus-build-parent - io.quarkus - 999-SNAPSHOT - ../../build-parent/pom.xml - - 4.0.0 - - quarkus-cli - Quarkus - Dev tools - CLI Native Tool - - - INFO - - 1.8 - 1.8 - - - - - - - io.quarkus - quarkus-bootstrap-core - - - io.quarkus - quarkus-devtools-common - - - io.quarkus - quarkus-development-mode-spi - - - io.quarkus - quarkus-core-deployment - - - io.quarkus - quarkus-platform-descriptor-json - - - io.quarkus - quarkus-platform-descriptor-resolver-json - - - - - - org.aesh - aesh - - - - - org.junit.jupiter - junit-jupiter - test - - - org.apache.commons - commons-lang3 - - - 
org.apache.maven.shared - maven-invoker - - - - - - - ${project.basedir}/../bom-descriptor-json/target - quarkus-bom-descriptor - false - - extensions.json - - - - - - maven-surefire-plugin - - 1 - false - - - - maven-jar-plugin - 3.1.0 - - - - true - io.quarkus.cli.QuarkusCli - - - - - - - test-jar - - - - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.2.0 - - - - - io.quarkus.cli.QuarkusCli - - - - - - package - - shade - - - - - io.quarkus:quarkus-bom-descriptor-json - org.fusesource.jansi:jansi - org.jboss.shrinkwrap.resolver:shrinkwrap-resolver-depchain - junit:junit - - - - - - - - - - - native-image - - - native-cli - - - - - - org.apache.maven.plugins - maven-dependency-plugin - 3.1.1 - - - copy - package - - copy-dependencies - - - - - ${project.build.directory} - true - - - - org.codehaus.mojo - exec-maven-plugin - 1.6.0 - - - copy - package - - exec - - - - - ${env.GRAALVM_HOME}/bin/native-image - target - - --verbose - -H:+ReportUnsupportedElementsAtRuntime - -H:ReflectionConfigurationFiles=../reflectconfigs/quarkuscli.json - -jar - ${project.build.finalName}.jar - - - - - - - - diff --git a/devtools/aesh/reflectconfigs/quarkuscli.json b/devtools/aesh/reflectconfigs/quarkuscli.json deleted file mode 100644 index d6776a45129eb..0000000000000 --- a/devtools/aesh/reflectconfigs/quarkuscli.json +++ /dev/null @@ -1,74 +0,0 @@ -[ - { - "name" : "io.quarkus.cli.commands.AddExtensionCommand", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true, - "fields" : [ - { "name" : "extensions" }, - { "name" : "path" } - ] - }, - { - "name" : "io.quarkus.cli.commands.CreateProjectCommand", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true, - "fields" : [ - { "name" : "artifactId" }, - { "name" : "groupId" }, - { "name" : "version" }, - { "name" : "className" }, - { "name" : "resourcePath" }, - { "name" : "buildTool" }, - { "name" : "extensions" }, - { "name" : "path" } - ] - }, - { - "name" : "io.quarkus.cli.commands.ListExtensionsCommand", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true, - "fields" : [ - { "name" : "all" }, - { "name" : "format" }, - { "name" : "searchPattern" }, - { "name" : "path" } - ] - }, - { - "name" : "io.quarkus.cli.commands.QuarkusBaseCommand", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true, - "fields" : [ - { "name" : "interactive" } - ] - }, - { - "name" : "org.aesh.command.impl.completer.BooleanOptionCompleter", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true - }, - { - "name" : "org.aesh.command.impl.completer.FileOptionCompleter", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true - }, { - "name" : "org.aesh.command.impl.parser.AeshOptionParser", - "allDeclaredConstructors" : true, - "allPublicConstructors" : true, - "allDeclaredMethods" : true, - "allPublicMethods" : true - } - -] diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/QuarkusCli.java b/devtools/aesh/src/main/java/io/quarkus/cli/QuarkusCli.java deleted file mode 100644 index 1df2f522983de..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/QuarkusCli.java +++ /dev/null @@ -1,17 +0,0 @@ -package 
io.quarkus.cli; - -import org.aesh.AeshRuntimeRunner; - -import io.quarkus.cli.commands.QuarkusBaseCommand; - -public class QuarkusCli { - - public static void main(String[] args) { - AeshRuntimeRunner.builder() - .command(QuarkusBaseCommand.class) - .args(args) - .interactive(true) - .execute(); - } - -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/AddExtensionsCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/AddExtensionsCommand.java deleted file mode 100644 index 578e390028855..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/AddExtensionsCommand.java +++ /dev/null @@ -1,78 +0,0 @@ -package io.quarkus.cli.commands; - -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import org.aesh.command.Command; -import org.aesh.command.CommandDefinition; -import org.aesh.command.CommandException; -import org.aesh.command.CommandResult; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Arguments; -import org.aesh.command.option.Option; -import org.aesh.io.Resource; -import org.aesh.selector.MultiSelect; - -import io.quarkus.dependencies.Extension; -import io.quarkus.devtools.commands.AddExtensions; -import io.quarkus.devtools.commands.data.QuarkusCommandException; -import io.quarkus.devtools.commands.data.QuarkusCommandOutcome; -import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.tools.config.QuarkusPlatformConfig; - -@CommandDefinition(name = "add", description = "Install extensions to a project") -public class AddExtensionsCommand implements Command { - - @Option(shortName = 'p', description = "Path to the project, if not set it will use the current working directory") - private Resource path; - - @Arguments(completer = ExtensionCompleter.class, description = "Name of the extension that will be added to the project") - private Set extensions; - - @Override - public CommandResult execute(CommandInvocation invocation) throws CommandException, InterruptedException { - - try { - Path projectDirectory = path != null ? Paths.get(path.getAbsolutePath()) - : Paths.get(System.getProperty("user.dir")); - - QuarkusProject quarkusProject = QuarkusProject.resolveExistingProject(projectDirectory, - QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor()); - - AddExtensions project = new AddExtensions(quarkusProject); - //if extensions is not set, create a selector - if (extensions == null || extensions.isEmpty()) { - MultiSelect selector = new MultiSelect(invocation.getShell(), - getAllExtensions(quarkusProject).stream().map(Extension::getSimplifiedArtifactId) - .collect(Collectors.toList()), - "Select the extensions that will be added to your project"); - - extensions = new HashSet<>(selector.doSelect()); - } - project.extensions(extensions); - - QuarkusCommandOutcome result = project.execute(); - if (result.isSuccess()) { - invocation.println("Added " + extensions + " to the project."); - } else { - invocation.println("Unable to add an extension matching " + extensions); - } - } catch (QuarkusCommandException | IOException e) { - invocation.println("Unable to add an extension matching " + extensions + ": " + e.getMessage()); - } catch (IllegalStateException e) { - invocation.println("No build file in " + path + " found. 
Will not attempt to add any extensions."); - } - - return CommandResult.SUCCESS; - } - - private List getAllExtensions(QuarkusProject quarkusProject) throws IOException { - return quarkusProject.getPlatformDescriptor().getExtensions(); - } - -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/BuildToolConverter.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/BuildToolConverter.java deleted file mode 100644 index 3927cfd191007..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/BuildToolConverter.java +++ /dev/null @@ -1,17 +0,0 @@ -package io.quarkus.cli.commands; - -import org.aesh.command.converter.Converter; -import org.aesh.command.converter.ConverterInvocation; -import org.aesh.command.validator.OptionValidatorException; - -import io.quarkus.devtools.project.BuildTool; - -public class BuildToolConverter implements Converter { - @Override - public BuildTool convert(ConverterInvocation invocation) throws OptionValidatorException { - if (invocation.getInput() != null && invocation.getInput().length() > 0) - return BuildTool.findTool(invocation.getInput()); - else - throw new OptionValidatorException(invocation.getInput() + " was not recognized as a supported build tool"); - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/CompileProjectCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/CompileProjectCommand.java deleted file mode 100644 index 2f45ebd6a6d79..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/CompileProjectCommand.java +++ /dev/null @@ -1,71 +0,0 @@ -package io.quarkus.cli.commands; - -import java.io.File; - -import org.aesh.command.Command; -import org.aesh.command.CommandDefinition; -import org.aesh.command.CommandException; -import org.aesh.command.CommandResult; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Argument; -import org.aesh.command.option.Option; -import org.aesh.io.Resource; - -import io.quarkus.devtools.project.BuildTool; -import io.quarkus.devtools.project.QuarkusProject; - -@CommandDefinition(name = "build", description = "Compiles the targeted project") -public class CompileProjectCommand implements Command { - @Option(name = "clean", hasValue = false, shortName = 'c', description = "Clean the project before compiling") - private boolean clean; - - @Argument(description = "Path to the project, if not set it will use the current working directory") - private Resource path; - - @Override - public CommandResult execute(CommandInvocation invocation) throws CommandException, InterruptedException { - - File projectPath = path != null ? 
new File(path.getAbsolutePath()) : new File(System.getProperty("user.dir")); - - BuildTool buildTool = QuarkusProject.resolveExistingProjectBuildTool(projectPath.toPath()); - - if (buildTool.getBuildFiles() != null && buildTool.getBuildFiles().length > 0) { - File buildFile = new File(buildTool.getBuildFiles()[0]); - - if (!buildFile.isFile()) { - invocation.println("Was not able to find a build file in: " + projectPath); - return CommandResult.FAILURE; - } - - try { - if (buildTool.equals(BuildTool.MAVEN)) { - File wrapper = ExecuteUtil.getMavenWrapper(projectPath.getAbsolutePath()); - if (wrapper != null) { - ExecuteUtil.executeWrapper(invocation, wrapper, "package"); - } else { - ExecuteUtil.executeMaven(projectPath, invocation, "package"); - } - - } - //do gradle - else { - File wrapper = ExecuteUtil.getGradleWrapper(projectPath.getAbsolutePath()); - if (wrapper != null) { - ExecuteUtil.executeWrapper(invocation, wrapper, "build"); - } else { - ExecuteUtil.executeGradle(projectPath, invocation, "build"); - } - } - } catch (InterruptedException i) { - invocation.println("Build was interrupted."); - return CommandResult.FAILURE; - } - - return CommandResult.SUCCESS; - } else { - invocation.println("Was not able to find a build file in: " + projectPath); - return CommandResult.FAILURE; - } - } - -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/CreateProjectCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/CreateProjectCommand.java deleted file mode 100644 index 3c88453e72304..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/CreateProjectCommand.java +++ /dev/null @@ -1,163 +0,0 @@ -package io.quarkus.cli.commands; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import org.aesh.command.Command; -import org.aesh.command.CommandDefinition; -import org.aesh.command.CommandResult; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Argument; -import org.aesh.command.option.Option; -import org.aesh.command.option.OptionList; -import org.aesh.command.shell.Shell; -import org.aesh.io.Resource; -import org.aesh.readline.action.KeyAction; -import org.aesh.readline.terminal.Key; -import org.aesh.selector.MultiSelect; -import org.aesh.selector.SelectorType; -import org.aesh.terminal.utils.Config; -import org.apache.commons.lang3.StringUtils; - -import io.quarkus.dependencies.Extension; -import io.quarkus.devtools.commands.AddExtensions; -import io.quarkus.devtools.commands.CreateProject; -import io.quarkus.devtools.commands.data.QuarkusCommandException; -import io.quarkus.devtools.project.BuildTool; -import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.tools.config.QuarkusPlatformConfig; - -@CommandDefinition(name = "create", generateHelp = true, description = "Creates a base Quarkus project") -public class CreateProjectCommand implements Command { - - @Option(name = "groupid", shortName = 'g', defaultValue = "org.acme.quarkus.sample", askIfNotSet = true, description = "The groupId of the project") - private String groupId; - - @Option(name = "artifactid", shortName = 'a', defaultValue = "my-quarkus-project", askIfNotSet = true, description = "The artifactId of the project (also often the name of the project") - private String artifactId; - - 
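
Worth calling out for reviewers of this removal: the aesh `create` command deleted in this file is only a thin front end over `io.quarkus.devtools.commands.CreateProject` from devtools-common, so the scaffolding itself survives the deletion. Below is a minimal sketch of that programmatic path, assembled purely from the calls visible in the deleted command; the class name, the literal values, and the reinstated generic type parameters (which this flattened diff has lost) are assumptions of the sketch, and error handling is trimmed.

```java
// Hedged sketch, based only on the calls visible in the deleted create command:
// CreateProject from devtools-common is what actually generates the project.
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

import io.quarkus.devtools.commands.CreateProject;
import io.quarkus.devtools.project.BuildTool;
import io.quarkus.platform.tools.config.QuarkusPlatformConfig;

public class CreateProjectSketch {

    public static void main(String[] args) throws Exception {
        Path projectDirectory = Paths.get(System.getProperty("user.dir"));
        Files.createDirectories(projectDirectory);

        // Template context; "path" is the REST resource path, as in the deleted command.
        Map<String, Object> context = new HashMap<>();
        context.put("path", "/hello");

        boolean created = new CreateProject(projectDirectory,
                QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor())
                        .groupId("org.acme.quarkus.sample")
                        .artifactId("my-quarkus-project")
                        .version("1.0.0-SNAPSHOT")
                        .buildTool(BuildTool.MAVEN)
                        .className("org.acme.quarkus.sample.HelloResource")
                        .doCreateProject(context);

        System.out.println(created ? "Project created" : "Project creation failed");
    }
}
```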
@Option(shortName = 'v', defaultValue = "1.0.0-SNAPSHOT", askIfNotSet = true, description = "Project version number") - private String version; - - @Option(shortName = 'c', name = "classname", description = "Rest resource name, by default set to: groupId+artifactId+HelloResource") - private String className; - - @Option(shortName = 'p', name = "resourcepath", description = "Rest resource path, by default set to /hello") - private String resourcePath; - - @Option(shortName = 'b', selector = SelectorType.SELECT, completer = ProjectTypeCompleter.class, converter = BuildToolConverter.class, description = "Build tool type for the project") - private BuildTool buildTool; - - @OptionList(shortName = 'e', completer = ExtensionCompleter.class, description = "Extensions that will be added to the build file") - private Set extensions; - - @Argument(description = "Path to the new project, if not set it will use the current working directory") - private Resource path; - - public CommandResult execute(CommandInvocation invocation) { - try { - Path projectDirectory = path != null ? Paths.get(path.getAbsolutePath()) - : Paths.get(System.getProperty("user.dir")); - - if (className == null) { - invocation.print("Do you want to create a REST resource? (y/n) "); - KeyAction input = invocation.input(); - invocation.print(Config.getLineSeparator()); - if (input == Key.y) - processClassName(invocation.getShell()); - } - - Files.createDirectories(projectDirectory); - - final Map context = new HashMap<>(); - context.put("path", resourcePath); - boolean status = new CreateProject(projectDirectory, - QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor()) - .groupId(groupId) - .artifactId(artifactId) - .version(version) - .buildTool(buildTool) - .className(className) - .doCreateProject(context); - - if ((extensions == null || extensions.isEmpty()) && status) { - //ask if the user want to add any extensions - try { - invocation.print("Do you want to add any extensions in addition to rest? 
(y/n) "); - if (invocation.input() == Key.y) { - QuarkusProject quarkusProject = QuarkusProject.resolveExistingProject(projectDirectory, - QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor()); - - MultiSelect selector = new MultiSelect(invocation.getShell(), - getAllExtensions(quarkusProject).stream().map(Extension::getSimplifiedArtifactId) - .collect(Collectors.toList()), - "Select the extensions that will be added to your project"); - - extensions = new HashSet<>(selector.doSelect()); - - status = new AddExtensions(QuarkusProject.resolveExistingProject(projectDirectory, - QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor())) - .extensions(extensions).execute() - .isSuccess(); - } - invocation.print(Config.getLineSeparator()); - } catch (InterruptedException ie) { - //ignored - } - } - - if (status) { - invocation.println("Project " + artifactId + - " created successfully at " + projectDirectory.toString() + "."); - } else { - invocation.println("Failed to create project"); - } - } catch (InterruptedException e) { - invocation.println("Project creation was aborted, " + e.getMessage()); - return CommandResult.FAILURE; - } catch (IOException | QuarkusCommandException e) { - invocation.println("Project creation failed, " + e.getMessage()); - invocation.println("Is the given path an empty folder?"); - return CommandResult.FAILURE; - } - - return CommandResult.SUCCESS; - } - - private void processClassName(Shell shell) throws InterruptedException { - String defaultResourceName = groupId.replace("-", ".").replace("_", "."); - className = shell.readLine("Set the resource class name, ( HelloResource ): "); - if (className == null || className.length() == 0) - className = defaultResourceName + ".HelloResource"; - else { - className = defaultResourceName + "." 
+ className; - } - - if (resourcePath == null || resourcePath.length() == 0) { - resourcePath = shell.readLine("Set the resource path (" + getDerivedPath(className) + "): "); - if (resourcePath == null || resourcePath.length() == 0) - resourcePath = getDerivedPath(className); - if (!resourcePath.startsWith("/")) - resourcePath = "/" + resourcePath; - } - - } - - private static String getDerivedPath(String className) { - String[] resourceClassName = StringUtils.splitByCharacterTypeCamelCase( - className.substring(className.lastIndexOf(".") + 1)); - return "/" + resourceClassName[0].toLowerCase(); - } - - private List getAllExtensions(QuarkusProject quarkusProject) throws IOException { - return quarkusProject.getPlatformDescriptor().getExtensions(); - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeCommand.java deleted file mode 100644 index c84beaaadb17e..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeCommand.java +++ /dev/null @@ -1,160 +0,0 @@ -package io.quarkus.cli.commands; - -import java.io.File; -import java.io.IOException; -import java.util.List; - -import org.aesh.command.Command; -import org.aesh.command.CommandDefinition; -import org.aesh.command.CommandException; -import org.aesh.command.CommandNotFoundException; -import org.aesh.command.CommandResult; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Argument; -import org.aesh.command.option.Option; -import org.aesh.command.option.OptionList; -import org.aesh.command.parser.CommandLineParserException; -import org.aesh.command.validator.CommandValidatorException; -import org.aesh.command.validator.OptionValidatorException; - -import io.quarkus.devtools.project.BuildTool; - -@CommandDefinition(name = "dev", description = "Starts up a development mode process for a Quarkus project.") -public class DevModeCommand implements Command { - - @Option(shortName = 'd', completer = DevModeDebugCompleter.class, defaultValue = { - "true" }, validator = DevModeDebugValidator.class, description = "If this server should be started in debug mode. " - + - "The default is to start in debug mode without suspending and listen on port 5005." + - " It supports the following options:\n" + - " \"false\" - The JVM is not started in debug mode\n" + - " \"true\" - The JVM is started in debug mode and suspends until a debugger is attached to port 5005\n" + - " \"client\" - The JVM is started in client mode, and attempts to connect to localhost:5005\n" + - "\"{port}\" - The JVM is started in debug mode and suspends until a debugger is attached to {port}") - private String debug; - - @Option(shortName = 'u', hasValue = false, name = "suspend", description = "Whether or not the JVM launch, in debug mode, should be suspended." 
- + "This parameter is only relevant when the JVM is launched in debug mode.") - private boolean suspend; - - @Option(shortName = 'b', name = "build", description = "Build folder, if not set the default folder for the used build tool will be used") - private File buildDir; - - @Option(shortName = 's', name = "source", description = "Source folder, if not set the default folder for the used build tool will be used") - private File sourceDir; - - @Option(name = "jvm-args", shortName = 'j', description = "JVM arguments to the dev mode process") - private String jvmArgs; - - @Option(name = "delete-dev-jar", shortName = 'e', hasValue = false, defaultValue = { - "true" }, description = "Delete the dev jar after it finishes") - private boolean deleteDevJar; - - @Option(name = "prevent-no-verify", shortName = 'p', defaultValue = { - "false" }, hasValue = false, description = "This value is intended to be set to true when some generated bytecode is eroneous causing " - + " the JVM to crash when the verify:non option is set (which is on by default).") - private boolean preventNoVerify; - - @Option(name = "no-deps", shortName = 'n', hasValue = false, defaultValue = { - "false" }, description = "Whether changes in the projects that appear to be dependencies of the project containing the " - + " application to be launched should trigger hot-reload. By default they do.") - - @OptionList(name = "compiler-args", shortName = 'c', description = "Additional parameters to pass to javac when recompiling changed source files.") - private List compilerArgs; - - @Argument(description = "Path to the project, if not set it will use the current working directory") - private File projectPath; - - private BuildTool buildTool; - private File buildFile; - - @Override - public CommandResult execute(CommandInvocation invocation) throws CommandException, InterruptedException { - - if (!verifyProjectStatus(invocation)) - return CommandResult.FAILURE; - - try { - devMode(invocation); - } catch (InterruptedException i) { - invocation.println("Stopping dev-mode"); - } - - return CommandResult.SUCCESS; - } - - private void devMode(CommandInvocation invocation) throws InterruptedException { - if (buildTool.equals(BuildTool.MAVEN)) { - File wrapper = ExecuteUtil.getMavenWrapper(projectPath.getAbsolutePath()); - if (wrapper != null) { - ExecuteUtil.executeWrapper(invocation, wrapper, "quarkus:dev"); - } else { - ExecuteUtil.executeMaven(projectPath, invocation, "quarkus:dev"); - } - - } - //do gradle - else { - File wrapper = ExecuteUtil.getGradleWrapper(projectPath.getAbsolutePath()); - if (wrapper != null) { - ExecuteUtil.executeWrapper(invocation, wrapper, "quarkusDev"); - } else { - ExecuteUtil.executeGradle(projectPath, invocation, "quarkusDev"); - } - } - } - - private boolean verifyProjectStatus(CommandInvocation invocation) { - - if (projectPath == null) - projectPath = new File(System.getProperty("user.dir")); - - buildTool = BuildTool.resolveExistingProject(projectPath.toPath()); - - if (buildTool.getBuildFiles().length > 0) - buildFile = new File(buildTool.getBuildFiles()[0]); - - if (buildFile == null || !buildFile.isFile()) { - invocation.println("Was not able to find a build file in: " + projectPath); - return false; - } - - if (buildDir == null) - buildDir = new File(buildTool.getBuildDirectory()); - - if (sourceDir == null) - sourceDir = resolveSourceDir(); - - if (!sourceDir.isDirectory()) { - invocation.println("ERROR: The project's sources directory does not exists (" + sourceDir); - return false; - } - - if 
(!buildDir.isDirectory() || !new File(buildDir, "classes").isDirectory()) { - invocation.println("Build directory (" + buildDir + " wasn't found. Compiling..."); - //if we run interactive, the name is 'compile-project', otherwise it's 'quarkus compile-project' - try { - invocation.executeCommand("compile-project"); - } catch (CommandNotFoundException e) { - try { - invocation.executeCommand("quarkus compile-project"); - } catch (CommandNotFoundException | CommandLineParserException | OptionValidatorException - | CommandValidatorException | CommandException | InterruptedException | IOException e2) { - invocation.println("Failure during compile, aborting: " + e2.getMessage()); - return false; - } - } catch (CommandLineParserException | OptionValidatorException - | CommandValidatorException | CommandException | InterruptedException | IOException e) { - invocation.println("Failure during compile, aborting: " + e.getMessage()); - return false; - } - } - - return true; - } - - private File resolveSourceDir() { - return new File(projectPath.getAbsolutePath() + File.separatorChar + "src" - + File.separatorChar + "main" + File.separatorChar + "java"); - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeDebugCompleter.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeDebugCompleter.java deleted file mode 100644 index b3bb9d03bee55..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeDebugCompleter.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.quarkus.cli.commands; - -import org.aesh.command.completer.CompleterInvocation; -import org.aesh.command.completer.OptionCompleter; - -public class DevModeDebugCompleter implements OptionCompleter { - @Override - public void complete(CompleterInvocation invocation) { - if (invocation.getGivenCompleteValue() == null || invocation.getGivenCompleteValue().length() == 0) { - invocation.addCompleterValue("false"); - invocation.addCompleterValue("true"); - invocation.addCompleterValue("client"); - invocation.addCompleterValue("{port}"); - } else { - if ("false".startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue("false"); - else if ("true".startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue("true"); - else if ("client".startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue("client"); - } - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeDebugValidator.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeDebugValidator.java deleted file mode 100644 index 69af1e0d5e63d..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/DevModeDebugValidator.java +++ /dev/null @@ -1,22 +0,0 @@ -package io.quarkus.cli.commands; - -import org.aesh.command.validator.OptionValidator; -import org.aesh.command.validator.OptionValidatorException; -import org.aesh.command.validator.ValidatorInvocation; - -public class DevModeDebugValidator implements OptionValidator> { - @Override - public void validate(ValidatorInvocation invocation) throws OptionValidatorException { - if (!invocation.getValue().equals("true") || - !invocation.getValue().equals("false") || - !invocation.getValue().equals("client")) { - //finally check if the value is a number - try { - Integer.parseInt(invocation.getValue()); - } catch (NumberFormatException nfe) { - throw new OptionValidatorException("The debug option need to be either: true, false, client or a port number"); - } - } - - } -} diff --git 
a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExecuteUtil.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExecuteUtil.java deleted file mode 100644 index 4a616f41f191d..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExecuteUtil.java +++ /dev/null @@ -1,135 +0,0 @@ -package io.quarkus.cli.commands; - -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.Optional; -import java.util.regex.Pattern; -import java.util.stream.Stream; - -import org.aesh.command.invocation.CommandInvocation; -import org.apache.maven.shared.invoker.DefaultInvocationRequest; -import org.apache.maven.shared.invoker.DefaultInvoker; -import org.apache.maven.shared.invoker.InvocationRequest; -import org.apache.maven.shared.invoker.InvocationResult; -import org.apache.maven.shared.invoker.Invoker; -import org.apache.maven.shared.invoker.MavenInvocationException; - -public class ExecuteUtil { - - public static void executeGradle(File projectDirectory, CommandInvocation invocation, String buildTarget) - throws InterruptedException { - String gradleExecutable = findExecutable("gradle"); - if (gradleExecutable == null) { - invocation.println("unable to find the gradle executable, is it in your path?"); - } else { - gradleExecutable += File.separator + "bin" + File.separator + "gradle"; - - try { - Process process = new ProcessBuilder() - .command(gradleExecutable, buildTarget) - .directory(projectDirectory) - .start(); - - BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); - - String line; - while ((line = reader.readLine()) != null) { - invocation.println(line); - } - - int exit = process.waitFor(); - if (exit != 0) - invocation.println("Build failed."); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - - public static void executeMaven(File projectDirectory, CommandInvocation invocation, String buildTarget) { - String mvnPath = findExecutable("mvn"); - System.setProperty("maven.home", mvnPath); - - InvocationRequest request = new DefaultInvocationRequest(); - request.setPomFile(new File(projectDirectory.getAbsolutePath() + File.separatorChar + "pom.xml")); - request.setGoals(Collections.singletonList(buildTarget)); - - Invoker invoker = new DefaultInvoker(); - - InvocationResult result = null; - try { - result = invoker.execute(request); - } catch (MavenInvocationException e) { - invocation.println("Failed during invocation: " + e.getMessage()); - } - - if (result.getExitCode() != 0) { - invocation.println("Build failed."); - } - } - - public static String findExecutable(String exec) { - Optional mvnPath = Stream.of(System.getenv("PATH").split(Pattern.quote(File.pathSeparator))) - .map(Paths::get) - .filter(path -> Files.exists(path.resolve(exec))).findFirst(); - - return mvnPath.map(value -> value.getParent().toString()).orElse(null); - } - - public static void executeWrapper(CommandInvocation invocation, File wrapper, String target) throws InterruptedException { - try { - Process process = new ProcessBuilder() - .command("./" + wrapper.getName(), target) - .directory(wrapper.getParentFile()) - .start(); - - BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); - - String line; - while ((line = reader.readLine()) != null) { - invocation.println(line); - } - - int exit = process.waitFor(); - 
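
One behaviour of the `ExecuteUtil` helpers being deleted here that is easy to miss: `executeGradle` and `executeWrapper` read only the child's stdout, so anything Maven or Gradle writes to stderr is dropped and, once the stderr pipe buffer fills, the build process can stall. Below is a hedged sketch of the same launch-and-echo loop with the two streams merged; the class and method names are illustrative only.

```java
// Hedged sketch (not part of the deleted file): merging stderr into stdout avoids losing
// build errors and prevents the child process from blocking on a full stderr buffer.
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;

public final class RunBuildToolSketch {

    /** Runs e.g. "./mvnw package" in the given directory and echoes its combined output. */
    static int run(File workingDir, String... command) throws IOException, InterruptedException {
        Process process = new ProcessBuilder(command)
                .directory(workingDir)
                .redirectErrorStream(true) // merge stderr into stdout
                .start();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        return process.waitFor();
    }
}
```

As a smaller nit in the same file, `getMavenWrapper` probes for `mvnw.bat` on Windows, while the Maven Wrapper normally generates `mvnw.cmd`, so that branch would usually fall back to a globally installed `mvn`.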
if (exit != 0) - invocation.println("Build failed."); - } catch (IOException e) { - e.printStackTrace(); - } - } - - public static File getGradleWrapper(String projectPath) { - if (System.getProperty("os.name").startsWith("Windows")) { - File wrapper = new File(projectPath + File.separator + "gradlew.bat"); - if (wrapper.isFile()) - return wrapper; - } else { - File wrapper = new File(projectPath + File.separator + "gradlew"); - if (wrapper.isFile()) - return wrapper; - } - - return null; - } - - public static File getMavenWrapper(String projectPath) { - if (System.getProperty("os.name").startsWith("Windows")) { - File wrapper = new File(projectPath + File.separator + "mvnw.bat"); - if (wrapper.isFile()) - return wrapper; - } else { - File wrapper = new File(projectPath + File.separator + "mvnw"); - if (wrapper.isFile()) - return wrapper; - } - - return null; - } - -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExtensionCompleter.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExtensionCompleter.java deleted file mode 100644 index a067f551114b3..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExtensionCompleter.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.cli.commands; - -import java.util.List; -import java.util.stream.Collectors; - -import org.aesh.command.completer.CompleterInvocation; -import org.aesh.command.completer.OptionCompleter; - -import io.quarkus.dependencies.Extension; -import io.quarkus.platform.tools.config.QuarkusPlatformConfig; - -public class ExtensionCompleter implements OptionCompleter { - - @Override - public void complete(CompleterInvocation invocation) { - if (invocation.getGivenCompleteValue().length() == 0) { - invocation.addAllCompleterValues( - getAllExtensions().stream().map(Extension::getSimplifiedArtifactId).collect(Collectors.toList())); - } else { - for (Extension loadExtension : getAllExtensions()) { - if (loadExtension.getSimplifiedArtifactId().startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue(loadExtension.getSimplifiedArtifactId()); - } - } - } - - private List getAllExtensions() { - return QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor().getExtensions(); - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExtensionFormat.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExtensionFormat.java deleted file mode 100644 index dd7527701775b..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ExtensionFormat.java +++ /dev/null @@ -1,26 +0,0 @@ -package io.quarkus.cli.commands; - -public enum ExtensionFormat { - CONCISE("concise"), - NAME("name"), - FULL("full"); - - private final String format; - - ExtensionFormat(String format) { - this.format = format; - } - - public String formatValue() { - return format; - } - - public static ExtensionFormat findFormat(String format) { - if (format.equalsIgnoreCase(FULL.format)) - return FULL; - else if (format.equalsIgnoreCase(NAME.format)) - return NAME; - else - return CONCISE; - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/FormatCompleter.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/FormatCompleter.java deleted file mode 100644 index 75d2f1de4e5b5..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/FormatCompleter.java +++ /dev/null @@ -1,21 +0,0 @@ -package io.quarkus.cli.commands; - -import org.aesh.command.completer.CompleterInvocation; -import org.aesh.command.completer.OptionCompleter; - -public 
class FormatCompleter implements OptionCompleter { - - @Override - public void complete(CompleterInvocation invocation) { - if (invocation.getGivenCompleteValue().length() == 0) { - for (ExtensionFormat format : ExtensionFormat.values()) - invocation.addCompleterValue(format.formatValue()); - } else { - for (ExtensionFormat format : ExtensionFormat.values()) { - if (format.formatValue().startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue(format.formatValue()); - } - } - - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/FormatConverter.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/FormatConverter.java deleted file mode 100644 index 017c806df2910..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/FormatConverter.java +++ /dev/null @@ -1,15 +0,0 @@ -package io.quarkus.cli.commands; - -import org.aesh.command.converter.Converter; -import org.aesh.command.converter.ConverterInvocation; -import org.aesh.command.validator.OptionValidatorException; - -public class FormatConverter implements Converter { - @Override - public ExtensionFormat convert(ConverterInvocation converterInvocation) throws OptionValidatorException { - if (converterInvocation.getInput() != null && converterInvocation.getInput().length() > 0) - return ExtensionFormat.findFormat(converterInvocation.getInput()); - - return ExtensionFormat.CONCISE; - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ListExtensionsCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/ListExtensionsCommand.java deleted file mode 100644 index 168da6237b02f..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ListExtensionsCommand.java +++ /dev/null @@ -1,57 +0,0 @@ -package io.quarkus.cli.commands; - -import java.nio.file.Path; -import java.nio.file.Paths; - -import org.aesh.command.Command; -import org.aesh.command.CommandDefinition; -import org.aesh.command.CommandException; -import org.aesh.command.CommandResult; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Option; -import org.aesh.io.Resource; -import org.aesh.selector.SelectorType; - -import io.quarkus.devtools.commands.ListExtensions; -import io.quarkus.devtools.commands.data.QuarkusCommandException; -import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.tools.config.QuarkusPlatformConfig; - -@CommandDefinition(name = "list", generateHelp = true, description = "List extensions for a project") -public class ListExtensionsCommand implements Command { - - @Option(shortName = 'a', hasValue = false, description = "Display all or just the installable extensions.") - private boolean all = false; - - @Option(shortName = 'f', selector = SelectorType.SELECT, completer = FormatCompleter.class, converter = FormatConverter.class, description = "Select the output format among:\n" - + - "'name' - display the name only\n" + - "'concise' - (display name and description\n" + - "'full' - (concise format and version related columns.\n") - private ExtensionFormat format; - - @Option(shortName = 's', hasValue = true, defaultValue = { - "*" }, description = "Search filter on extension list. 
The format is based on Java Pattern.") - private String searchPattern; - - @Option(shortName = 'p', description = "Path to the project, if not set it will use the current working directory") - private Resource path; - - public CommandResult execute(CommandInvocation commandInvocation) throws CommandException, InterruptedException { - try { - Path projectDirectory = path != null ? Paths.get(path.getAbsolutePath()) - : Paths.get(System.getProperty("user.dir")); - - new ListExtensions(QuarkusProject.resolveExistingProject(projectDirectory, - QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor())) - .all(all) - .format(format.formatValue()) - .search(searchPattern) - .execute(); - } catch (QuarkusCommandException e) { - e.printStackTrace(); - } - return CommandResult.SUCCESS; - } - -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ProjectTypeCompleter.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/ProjectTypeCompleter.java deleted file mode 100644 index 61f348e08bf38..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/ProjectTypeCompleter.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.quarkus.cli.commands; - -import java.util.Arrays; -import java.util.stream.Collectors; - -import org.aesh.command.completer.CompleterInvocation; -import org.aesh.command.completer.OptionCompleter; - -import io.quarkus.devtools.project.BuildTool; - -public class ProjectTypeCompleter implements OptionCompleter { - - @Override - public void complete(CompleterInvocation invocation) { - if (invocation.getGivenCompleteValue().length() == 0) - invocation.addAllCompleterValues( - Arrays.stream(BuildTool.values()).map(BuildTool::name).collect(Collectors.toList())); - else { - for (BuildTool tool : BuildTool.values()) { - if (tool.name().startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue(tool.name()); - else if (tool.name().toLowerCase().startsWith(invocation.getGivenCompleteValue())) - invocation.addCompleterValue(tool.name().toLowerCase()); - } - } - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/QuarkusBaseCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/QuarkusBaseCommand.java deleted file mode 100644 index 6cf383de0df86..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/QuarkusBaseCommand.java +++ /dev/null @@ -1,49 +0,0 @@ -package io.quarkus.cli.commands; - -import org.aesh.AeshConsoleRunner; -import org.aesh.command.Command; -import org.aesh.command.CommandResult; -import org.aesh.command.GroupCommandDefinition; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Option; -import org.aesh.terminal.utils.Config; - -@GroupCommandDefinition(name = QuarkusBaseCommand.COMMAND_NAME, generateHelp = true, groupCommands = { - ListExtensionsCommand.class, - AddExtensionsCommand.class, - CreateProjectCommand.class, - DevModeCommand.class, - CompileProjectCommand.class }, description = " [] \n\nThese are the common quarkus commands used in various situations") -public class QuarkusBaseCommand implements Command { - public static final String COMMAND_NAME = "quarkus"; - - @Option(shortName = 'i', hasValue = false, description = "Starts an interactive Quarkus Shell") - private boolean interactive; - - public CommandResult execute(CommandInvocation commandInvocation) { - if (interactive) { - commandInvocation.println("Starting interactive CLI...."); - //we need to stop first since the QuarkusCli starts the runner with interactive = true - 
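
Looking back at `DevModeDebugValidator`, removed a little earlier in this diff: its equality checks are chained with `||`, which makes the condition true for every input, so the documented values of the option ("true", "false", "client") fall through to the number parse and are rejected, and only plain port numbers validate. Below is a hedged sketch of the check as it was presumably intended, kept standalone by swapping the aesh `OptionValidatorException` for `IllegalArgumentException`; the names are illustrative.

```java
// Sketch of the presumably intended debug-option validation: accept "true", "false",
// "client" or a port number, reject everything else (note the && instead of ||).
public final class DebugOptionCheckSketch {

    static void checkDebugOption(String value) {
        if (!value.equals("true") && !value.equals("false") && !value.equals("client")) {
            try {
                Integer.parseInt(value); // last resort: treat the value as a port number
            } catch (NumberFormatException nfe) {
                throw new IllegalArgumentException(
                        "The debug option needs to be either: true, false, client or a port number");
            }
        }
    }

    public static void main(String[] args) {
        checkDebugOption("client"); // accepted
        checkDebugOption("5005");   // accepted as a port number
        // checkDebugOption("maybe") would throw IllegalArgumentException
    }
}
```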
commandInvocation.stop(); - startInteractive(); - } - - return CommandResult.SUCCESS; - } - - private void startInteractive() { - //we start the CLI in a new thread since the caller has a TerminalConnection open - Runnable runnable = () -> { - AeshConsoleRunner.builder() - .command(CreateProjectCommand.class) - .command(AddExtensionsCommand.class) - .command(ListExtensionsCommand.class) - .command(DevModeCommand.class) - .command(CompileProjectCommand.class) - .prompt("[quarkus@" + Config.getUserDir() + "]$ ") - .addExitCommand() - .start(); - }; - new Thread(runnable).start(); - } -} diff --git a/devtools/aesh/src/main/java/io/quarkus/cli/commands/RemoveExtensionCommand.java b/devtools/aesh/src/main/java/io/quarkus/cli/commands/RemoveExtensionCommand.java deleted file mode 100644 index 45a40b8f81a69..0000000000000 --- a/devtools/aesh/src/main/java/io/quarkus/cli/commands/RemoveExtensionCommand.java +++ /dev/null @@ -1,72 +0,0 @@ -package io.quarkus.cli.commands; - -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.List; - -import org.aesh.command.Command; -import org.aesh.command.CommandDefinition; -import org.aesh.command.CommandException; -import org.aesh.command.CommandResult; -import org.aesh.command.invocation.CommandInvocation; -import org.aesh.command.option.Argument; -import org.aesh.command.option.Option; -import org.aesh.io.Resource; - -import io.quarkus.dependencies.Extension; -import io.quarkus.devtools.commands.RemoveExtensions; -import io.quarkus.devtools.commands.data.QuarkusCommandOutcome; -import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.tools.config.QuarkusPlatformConfig; - -@CommandDefinition(name = "remove-extension", description = "Remove extension from a project") -public class RemoveExtensionCommand implements Command { - - @Option(shortName = 'h', hasValue = false, overrideRequired = true) - private boolean help; - - @Option(shortName = 'e', required = true, description = "Name of the extension that will be removed from the project") - private String extension; - - @Argument(description = "path to the project", required = true) - private Resource path; - - @Override - public CommandResult execute(CommandInvocation commandInvocation) throws CommandException, InterruptedException { - if (help) { - commandInvocation.println(commandInvocation.getHelpInfo("quarkus remove-extension")); - return CommandResult.SUCCESS; - } else { - - final QuarkusPlatformDescriptor platformDescr = QuarkusPlatformConfig.getGlobalDefault().getPlatformDescriptor(); - if (!findExtension(extension, platformDescr.getExtensions())) { - commandInvocation.println("Can not find any extension named: " + extension); - return CommandResult.SUCCESS; - } - try { - final Path projectPath = Paths.get(path.getAbsolutePath()); - RemoveExtensions project = new RemoveExtensions(QuarkusProject.resolveExistingProject(projectPath, - platformDescr)).extensions(Collections.singleton(extension)); - QuarkusCommandOutcome result = project.execute(); - if (!result.isSuccess()) { - throw new CommandException("Unable to remove an extension matching " + extension); - } - } catch (Exception e) { - throw new CommandException("Unable to remove an extension matching " + extension, e); - } - } - - return CommandResult.SUCCESS; - } - - private boolean findExtension(String name, List extensions) { - for (Extension ext : extensions) { - if (ext.getName().equalsIgnoreCase(name)) { - 
return true; - } - } - return false; - } -} diff --git a/devtools/bom-descriptor-json/pom.xml b/devtools/bom-descriptor-json/pom.xml index 74b59fdc1bff4..348d3c8d8f245 100644 --- a/devtools/bom-descriptor-json/pom.xml +++ b/devtools/bom-descriptor-json/pom.xml @@ -6,31 +6,58 @@ quarkus-devtools-all io.quarkus 999-SNAPSHOT - ../ 4.0.0 - quarkus-bom-descriptor-json + quarkus-bom-quarkus-platform-descriptor pom Quarkus - BOM - Descriptor JSON - - + + process-resources + + + maven-resources-plugin + + + copy-resources + validate + + copy-resources + + + ${basedir}/target/resources + + + ${basedir}/src/main/resources + true + + + + + + io.quarkus quarkus-platform-descriptor-json-plugin ${project.version} - process-resources + ${pluginPhase} - generate-extensions-json + + generate-platform-descriptor-json + + software.amazon.awssdk + quarkus-bom + true + ${basedir}/target/resources/catalog-overrides.json @@ -38,5 +65,2435 @@ + + + + + bom-descriptor-json-hollow + + + incremental + + + + none + true + + + + bom-descriptor-json-deps + + + !incremental + + + + + + + io.quarkus + quarkus-agroal + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-alexa + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-dynamodb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-iam + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-kms + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-http + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-rest + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-xray + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-s3 + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-ses + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-sns + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-sqs + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-arc + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-artemis-core + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-artemis-jms + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-avro + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-azure-functions-http + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-cache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-caffeine + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-config-yaml + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-consul-config + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-docker + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-jib + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + 
quarkus-container-image-openshift + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-s2i + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-core + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-datasource + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elasticsearch-rest-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elasticsearch-rest-client-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elasticsearch-rest-high-level-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-jdbc + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-ldap + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-oauth2 + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-properties-file + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-flyway + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-amazon-lambda + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-google-cloud-functions + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-http + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-knative-events + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-server-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-google-cloud-functions + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-google-cloud-functions-http + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-grpc + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-grpc-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-envers + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache-kotlin + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-rest-data-panache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-reactive + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-reactive-panache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-reactive-panache-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-search-orm-elasticsearch + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-validator + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-infinispan-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jackson + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + 
quarkus-jacoco + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaeger + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaxb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaxp + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaxrs-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-db2 + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-derby + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-h2 + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-mariadb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-mssql + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-mysql + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-postgresql + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jgit + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jsch + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jsonb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jsonp + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kafka-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kafka-streams + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-keycloak-admin-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-keycloak-authorization + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kotlin + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-config + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-service-binding + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-liquibase + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-logging-gelf + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-logging-json + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-logging-sentry + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mailer + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-micrometer + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-micrometer-registry-prometheus + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-minikube + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-panache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-panache-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-panache-kotlin + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-rest-data-panache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mutiny + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mutiny-reactive-streams-operators 
+ ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-narayana-jta + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-narayana-stm + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-neo4j + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-netty + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-client-filter + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-token-propagation + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-openshift + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-openshift-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-picocli + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-quartz + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-qute + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-datasource + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-db2-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-messaging-http + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-mysql-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-pg-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-redis-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-jackson + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-jaxb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-jsonb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-mutiny + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-reactive + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-jackson + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-jaxb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-jsonb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-multipart + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-mutiny + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-mutiny-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-qute + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-jackson + ${project.version} + pom + test + 
+ + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-jsonb + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-qute + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-servlet + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-server-common + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-scala + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-scheduler + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-security + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-security-jpa + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-context-propagation + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-fault-tolerance + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-graphql + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-health + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-jwt + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-jwt-build + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-metrics + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-openapi + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-opentracing + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-amqp + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-kafka + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-mqtt + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-streams-operators + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-type-converters + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-boot-properties + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-cache + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-cloud-config-client + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-data-jpa + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-data-rest + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-di + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-scheduled + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-security + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-web + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-swagger-ui + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-tika + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-undertow + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vault + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx + ${project.version} + pom + test + + + * + * + 
+ + + + io.quarkus + quarkus-vertx-core + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-graphql + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-http + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-web + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-webjars-locator + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-websockets + ${project.version} + pom + test + + + * + * + + + + + + + - + \ No newline at end of file diff --git a/devtools/bom-descriptor-json/src/main/resources/catalog-overrides.json b/devtools/bom-descriptor-json/src/main/resources/catalog-overrides.json new file mode 100644 index 0000000000000..7005885428673 --- /dev/null +++ b/devtools/bom-descriptor-json/src/main/resources/catalog-overrides.json @@ -0,0 +1,144 @@ +{ + "categories": [ + { + "name": "Web", + "id": "web", + "description": "Everything you need for REST endpoints, HTTP and web formats like JSON", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-resteasy", + "io.quarkus:quarkus-resteasy-jackson", + "io.quarkus:quarkus-resteasy-jsonb" + ] + } + }, + { + "name": "Data", + "id": "data", + "description": "Accessing and managing your data (RDBMS, NoSQL, caching, transaction management, etc)", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-hibernate-orm", + "io.quarkus:quarkus-hibernate-orm-panache", + "io.quarkus:quarkus-jdbc-postgresql", + "io.quarkus:quarkus-jdbc-mariadb", + "io.quarkus:quarkus-jdbc-mysql", + "io.quarkus:quarkus-jdbc-mssql", + "io.quarkus:quarkus-jdbc-db2", + "io.quarkus:quarkus-jdbc-h2", + "io.quarkus:quarkus-jdbc-derby" + ] + } + }, + { + "name": "Messaging", + "id": "messaging", + "description": "Send and receives message to various messaging systems (AMQP, KAfka etc)", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-smallrye-reactive-messaging", + "io.quarkus:quarkus-smallrye-reactive-messaging-amqp", + "io.quarkus:quarkus-smallrye-reactive-messaging-kafka", + "io.quarkus:quarkus-smallrye-reactive-messaging-mqtt" + ] + } + }, + { + "name": "Core", + "id": "core", + "description": "Core Quarkus components: engine, logging, etc.", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-config-yaml", + "io.quarkus:quarkus-logging-json" + ] + } + }, + { + "name": "Reactive", + "id": "reactive", + "description": "Non blocking stack and connectors", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-vertx", + "io.quarkus:quarkus-mutiny" + ] + } + }, + { + "name": "Cloud", + "id": "cloud", + "description": "Useful for Cloud Native deployments platforms like Kubernetes and cloud providers", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-kubernetes", + "io.quarkus:quarkus-openshift", + "io.quarkus:quarkus-smallrye-health", + "io.quarkus:quarkus-smallrye-fault-tolerance" + ] + } + }, + { + "name": "Observability", + "id": "observability", + "description": "Metrics, tracing, etc" + }, + { + "name": "Security", + "id": "security", + "description": "Everything you need to secure your application", + "metadata": { + "pinned": [ + "io.quarkus:quarkus-oidc", + "io.quarkus:quarkus-smallrye-jwt" + ] + } + }, + { + "name": "Serialization", + "id": "serialization", + "description": "Serializing and deserializing various formats" + }, + { + "name": "Miscellaneous", + "id": "miscellaneous", + "description": "Mixed bag of good stuff" + }, + { + "name": "Compatibility", + "id": "compatibility", + "description": "Support for alternative 
programming models on Quarkus" + }, + { + "name": "Alternative languages", + "id": "alt-languages", + "description": "Support for other JVM based languages" + } + ], + "metadata":{ + "project": { + "properties": { + "doc-root": "https://quarkus.io", + "rest-assured-version": "${rest-assured.version}", + "compiler-plugin-version": "${compiler-plugin.version}", + "surefire-plugin-version": "${version.surefire.plugin}", + "kotlin-version": "${kotlin.version}", + "scala-version": "${scala.version}", + "scala-plugin-version": "${scala-plugin.version}", + "quarkus-core-version": "${project.version}", + "maven-plugin-groupId": "${project.groupId}", + "maven-plugin-artifactId": "quarkus-maven-plugin", + "maven-plugin-version": "${project.version}", + "gradle-plugin-id": "io.quarkus", + "gradle-plugin-version": "${project.version}", + "supported-maven-versions": "${supported-maven-versions}", + "proposed-maven-version": "${proposed-maven-version}", + "maven-wrapper-version": "${maven-wrapper.version}", + "gradle-wrapper-version": "${gradle-wrapper.version}" + } + }, + "codestarts-artifacts": [ + "${project.groupId}:quarkus-platform-descriptor-json::jar:${project.version}" + ] + } +} \ No newline at end of file diff --git a/devtools/bom-descriptor-json/src/main/resources/extensions-overrides.json b/devtools/bom-descriptor-json/src/main/resources/extensions-overrides.json deleted file mode 100644 index 53c32b86548ac..0000000000000 --- a/devtools/bom-descriptor-json/src/main/resources/extensions-overrides.json +++ /dev/null @@ -1,411 +0,0 @@ -{ - "categories": [ - { - "name": "Core", - "id": "core", - "description": "Essential Quarkus components. Provided automatically" - }, - { - "name": "Web", - "id": "web", - "description": "Everything you need for REST endpoints, HTTP and web formats like JSON", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-resteasy", - "io.quarkus:quarkus-resteasy-jsonb", - "io.quarkus:quarkus-resteasy-jackson" - ] - } - }, - { - "name": "Data", - "id": "data", - "description": "Accessing and managing your data (RDBMS, NoSQL, caching, transaction management, etc)", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-hibernate-orm", - "io.quarkus:quarkus-hibernate-orm-panache", - "io.quarkus:quarkus-jdbc-postgresql", - "io.quarkus:quarkus-jdbc-mariadb", - "io.quarkus:quarkus-jdbc-mysql", - "io.quarkus:quarkus-jdbc-mssql", - "io.quarkus:quarkus-jdbc-h2", - "io.quarkus:quarkus-jdbc-derby" - ] - } - }, - { - "name": "Messaging", - "id": "messaging", - "description": "Send and receives message to various messaging ssytems (AMQP, KAfka etc)", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-smallrye-reactive-messaging", - "io.quarkus:quarkus-smallrye-reactive-messaging-amqp", - "io.quarkus:quarkus-smallrye-reactive-messaging-kafka", - "io.quarkus:quarkus-smallrye-reactive-messaging-mqtt" - ] - } - }, - { - "name": "Reactive", - "id": "reactive", - "description": "Non blocking stack and connectors", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-vertx" - ] - } - }, - { - "name": "Cloud", - "id": "cloud", - "description": "Useful for Cloud Native deployments platforms like Kubernetes and cloud providers", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-kubernetes", - "io.quarkus:quarkus-smallrye-health", - "io.quarkus:quarkus-smallrye-fault-tolerance" - ] - } - }, - { - "name": "Observability", - "id": "observability", - "description": "Metrics, tracing, etc" - }, - { - "name": "Security", - "id": "security", - "description": "Everything you need to secure your 
application", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-oidc", - "io.quarkus:quarkus-smallrye-jwt" - ] - } - }, - { - "name": "Integration", - "id": "integration", - "description": "Connectors to read to write from a skew of systems (file, S#, Twitter, etc)", - "metadata": { - "pinned": [ - "org.apache.camel.quarkus:camel-quarkus-core", - "org.apache.camel.quarkus:camel-quarkus-core-xml" - ] - } - }, - { - "name": "gRPC", - "id": "grpc", - "description": "gRPC integration", - "metadata": { - "pinned": [ - "io.quarkus:quarkus-grpc" - ] - } - }, - { - "name": "Business Automation", - "id": "business-automation", - "description": "Rules engine, BPM, etc" - }, - { - "name": "Serialization", - "id": "serialization", - "description": "Serializing and deserializing various formats" - }, - { - "name": "Miscellaneous", - "id": "miscellaneous", - "description": "Mixed bag of good stuff" - }, - { - "name": "Compatibility", - "id": "compatibility", - "description": "Support for alternative programming models on Quarkus" - }, - { - "name": "Alternative languages", - "id": "alt-languages", - "description": "Support for other JVM based languages" - } - ], - "extensions": [ - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-support-common", - "version": "0.3.1", - "description": "Camel Quarkus Support Common", - "metadata": { - "unlisted": true - } - }, - { - "metadata": { - "unlisted": true - }, - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-support-jetty", - "version": "0.3.1", - "description": "Camel Quarkus Support Jetty" - }, - { - "metadata": { - "unlisted": true - }, - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-support-xstream", - "version": "0.3.1", - "description": "Camel Quarkus Support XStream" - }, - { - "metadata": { - "unlisted": true - }, - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-support-xml", - "version": "0.3.1", - "description": "Camel Quarkus Support XML" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-core", - "version": "0.3.1", - "description": "The Camel Quarkus core module" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-core-cloud", - "version": "0.3.1", - "description": "The Camel Quarkus core cloud module" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-core-xml", - "version": "0.3.1", - "description": "Camel Quarkus core XML module" - }, - { - "metadata": { - "unlisted": true - }, - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-http-common", - "version": "0.3.1", - "description": "Camel Quarkus HTTP Common" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-aws-eks", - "version": "0.3.1", - "description": "A Camel Amazon EKS Web Service Component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-aws-s3", - "version": "0.3.1", - "description": "A Camel Amazon S3 Web Service Component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-aws-sqs", - "version": "0.3.1", - "description": "A Camel Amazon SQS Web Service Component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-aws-sns", - "version": "0.3.1", - "description": "A Camel Amazon SNS Web Service Component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-bean", - "version": "0.3.1", - "description": "Camel 
Bean component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-csv", - "version": "0.3.1", - "description": "Camel CSV data format support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-direct", - "version": "0.3.1", - "description": "Camel Direct component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-fhir", - "version": "0.3.1", - "description": "Camel FHIR HL7 support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-infinispan", - "version": "0.3.1", - "description": "Camel Infinispan support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-jackson", - "version": "0.3.1", - "description": "Camel Jackson support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-jdbc", - "version": "0.3.1", - "description": "Camel JDBC suppors" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-log", - "version": "0.3.1", - "description": "Camel Log component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-microprofile-health", - "version": "0.3.1", - "description": "Integration with the Quarkus MicroProfile Health extension" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-microprofile-metrics", - "version": "0.3.1", - "description": "Integration with the Quarkus MicroProfile Metrics extension" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-mail", - "version": "0.3.1", - "description": "Camel Mail support" - }, - { - "name": "Camel Quarkus Netty HTTP", - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-netty-http", - "version": "0.3.1", - "description": "Camel Netty HTTP support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-opentracing", - "version": "0.3.1", - "description": "Distributed tracing using OpenTracing" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-paho", - "version": "0.3.1", - "description": "Camel Eclipse Paho support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-platform-http", - "version": "0.3.1", - "description": "HTTP platform component is used for integrating Camel HTTP with Quarkus HTTP layer" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-reactive-executor", - "version": "0.3.1", - "description": "Integrates Quarkus reactive executor with Camel" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-rest", - "version": "0.3.1", - "description": "Camel REST component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-salesforce", - "version": "0.3.1", - "description": "Camel Salesforce support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-servlet", - "version": "0.3.1", - "description": "Camel servlet transport support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-slack", - "version": "0.3.1", - "description": "Camel Slack Support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-tarfile", - "version": "0.3.1", - "description": "Camel Tar file support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-timer", - "version": "0.3.1", - "description": "Camel Timer 
component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-twitter", - "version": "0.3.1", - "description": "Camel Twitter support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-vm", - "version": "0.3.1", - "description": "Camel VM component" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-zipfile", - "version": "0.3.1", - "description": "Camel Zip file support" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-attachments", - "version": "0.3.1", - "description": "Java Attachments support for Camel Message" - }, - { - "group-id": "org.apache.camel.quarkus", - "artifact-id": "camel-quarkus-pdf", - "version": "0.3.1", - "description": "Camel PDF support" - }, - { - "group-id": "org.kie.kogito", - "artifact-id": "kogito-quarkus", - "version": "0.8.0", - "description": "Kogito extension" - }, - { - "group-id": "org.optaplanner", - "artifact-id": "optaplanner-quarkus", - "version": "7.39.0.CR1", - "description": "OptaPlanner extension" - }, - { - "group-id": "org.optaplanner", - "artifact-id": "optaplanner-quarkus-jackson", - "version": "7.39.0.CR1", - "description": "OptaPlanner Jackson extension" - }, - { - "group-id": "org.optaplanner", - "artifact-id": "optaplanner-quarkus-jsonb", - "version": "7.39.0.CR1", - "description": "OptaPlanner JSON-B extension" - } - ] -} diff --git a/devtools/cli/pom.xml b/devtools/cli/pom.xml new file mode 100644 index 0000000000000..582a33a4e773b --- /dev/null +++ b/devtools/cli/pom.xml @@ -0,0 +1,125 @@ + + + + quarkus-build-parent + io.quarkus + 999-SNAPSHOT + ../../build-parent/pom.xml + + 4.0.0 + + quarkus-cli + Quarkus - Command Line Interface + Quarkus command line utility + + + io.quarkus + quarkus-picocli + + + io.quarkus + quarkus-devtools-common + + + io.quarkus + quarkus-platform-descriptor-json + + + io.quarkus + quarkus-test-devtools + test + + + org.junit.jupiter + junit-jupiter + test + + + org.assertj + assertj-core + test + + + + io.quarkus + quarkus-picocli-deployment + pom + test + ${project.version} + + + * + * + + + + + + + + + src/test/resources + true + + + + + io.quarkus + quarkus-maven-plugin + + + + build + generate-code + generate-code-tests + + + true + + + + + + + + + + native + + + native + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + + integration-test + verify + + + + + ${project.build.directory}/${project.build.finalName}-runner + + + + + + + + + + native + + + + + + diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Add.java b/devtools/cli/src/main/java/io/quarkus/cli/Add.java new file mode 100644 index 0000000000000..8d811879d2256 --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/Add.java @@ -0,0 +1,66 @@ +package io.quarkus.cli; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.commands.AddExtensions; +import io.quarkus.devtools.commands.data.QuarkusCommandOutcome; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import picocli.CommandLine; + +@CommandLine.Command(name = "add", usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Add extension(s) to current project.") +public class Add extends 
BaseSubCommand implements BuildsystemCommand { + + @CommandLine.Parameters(arity = "1", paramLabel = "EXTENSION", description = "extensions to add to project") + Set extensions; + + @Override + public boolean aggregate(BuildTool buildtool) { + return buildtool != BuildTool.MAVEN; + } + + @Override + public int execute(Path projectDir, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) { + return addMaven(projectDir); + } else { + throw new IllegalStateException("Should be unreachable"); + } + } + + @Override + public List getArguments(Path projectDir, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) + throw new IllegalStateException("Should be unreachable"); + ArrayList args = new ArrayList<>(); + args.add("addExtension"); + String param = "--extensions=" + String.join(",", extensions); + args.add(param); + return args; + } + + private Integer addMaven(Path projectDirectory) { + try { + QuarkusProject quarkusProject = QuarkusProjectHelper.getProject(projectDirectory, QuarkusCliVersion.version()); + + AddExtensions project = new AddExtensions(quarkusProject); + project.extensions(extensions); + + QuarkusCommandOutcome result = project.execute(); + return result.isSuccess() ? CommandLine.ExitCode.OK : CommandLine.ExitCode.SOFTWARE; + } catch (Exception e) { + if (parent.showErrors) + e.printStackTrace(err()); + err().println("Unable to add extension" + (extensions.size() > 1 ? "s" : "") + ": " + e.getMessage()); + return CommandLine.ExitCode.SOFTWARE; + } + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Build.java b/devtools/cli/src/main/java/io/quarkus/cli/Build.java new file mode 100644 index 0000000000000..2adf92062563a --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/Build.java @@ -0,0 +1,57 @@ +package io.quarkus.cli; + +import java.nio.file.Path; +import java.util.LinkedList; +import java.util.List; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.devtools.project.BuildTool; +import picocli.CommandLine; + +@CommandLine.Command(name = "build", usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Build your quarkus project") +public class Build extends BaseSubCommand implements BuildsystemCommand { + + @CommandLine.Option(names = { "-n", + "--native" }, order = 4, description = "Build native executable.") + boolean isNative = false; + + @CommandLine.Option(names = { "-s", + "--skip-tests" }, order = 5, description = "Skip tests.") + boolean skipTests = false; + + @CommandLine.Option(names = { "--offline" }, order = 6, description = "Work offline.") + boolean offline = false; + + @Override + public boolean aggregate(BuildTool buildtool) { + return true; + } + + @Override + public List getArguments(Path projectDir, BuildTool buildtool) { + LinkedList args = new LinkedList<>(); + if (buildtool == BuildTool.MAVEN) { + args.add("install"); + if (isNative) + args.add("-Dnative"); + if (skipTests) { + args.add("-DskipTests"); + args.add("-Dmaven.test.skip=true"); + } + if (offline) + args.add("--offline"); + } else { + args.add("build"); + if (isNative) + args.add("-Dquarkus.package.type=native"); + if (skipTests) { + args.add("-x"); + args.add("test"); + } + if (offline) + args.add("--offline"); + } + return args; + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Clean.java b/devtools/cli/src/main/java/io/quarkus/cli/Clean.java new file mode 100644 index 0000000000000..3a6e8c96d7e81 --- /dev/null +++ 
b/devtools/cli/src/main/java/io/quarkus/cli/Clean.java @@ -0,0 +1,28 @@ +package io.quarkus.cli; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.devtools.project.BuildTool; +import picocli.CommandLine; + +@CommandLine.Command(name = "clean", usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Clean current project") +public class Clean extends BaseSubCommand implements BuildsystemCommand { + + @Override + public boolean aggregate(BuildTool buildtool) { + return true; + } + + @Override + public List getArguments(Path projectDir, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) { + return Collections.singletonList("clean"); + } else { + return Collections.singletonList("clean"); + } + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Create.java b/devtools/cli/src/main/java/io/quarkus/cli/Create.java new file mode 100644 index 0000000000000..587b571642c6f --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/Create.java @@ -0,0 +1,145 @@ +package io.quarkus.cli; + +import java.io.File; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.Callable; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.commands.CreateProject; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import io.quarkus.devtools.project.codegen.SourceType; +import picocli.CommandLine; + +@CommandLine.Command(name = "create", sortOptions = false, usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Create a new quarkus project.") +public class Create extends BaseSubCommand implements Callable { + + @CommandLine.Option(names = { "-g", + "--group-id" }, order = 1, paramLabel = "GROUP-ID", description = "The groupId for project") + String groupId = "org.acme"; + + @CommandLine.Option(names = { "-a", + "--artifact-id" }, order = 2, paramLabel = "ARTIFACT-ID", description = "The artifactId for project") + String artifactId = "code-with-quarkus"; + + @CommandLine.Option(names = { "-v", + "--version" }, order = 3, paramLabel = "VERSION", description = "The version for project") + String version = "1.0.0-SNAPSHOT"; + + @CommandLine.Option(names = { "-0", + "--no-examples" }, order = 4, description = "Generate without example code.") + boolean noExamples = false; + + @CommandLine.Option(names = { "-e", + "--examples" }, order = 4, description = "Choose which example(s) you want in the generated Quarkus application.") + Set examples; + + @CommandLine.ArgGroup() + TargetBuildTool targetBuildTool = new TargetBuildTool(); + + static class TargetBuildTool { + @CommandLine.Option(names = { "--maven" }, order = 5, description = "Create a Maven project. (default)") + boolean maven = false; + + @CommandLine.Option(names = { "--gradle" }, order = 6, description = "Create a Gradle project.") + boolean gradle = false; + + @CommandLine.Option(names = { + "--grade-kotlin-dsl" }, order = 7, description = "Create a Gradle Kotlin DSL project.") + boolean gradleKotlinDsl = false; + } + + @CommandLine.ArgGroup() + TargetLanguage language = new TargetLanguage(); + + static class TargetLanguage { + @CommandLine.Option(names = { + "--java" }, order = 8, description = "Generate Java examples. 
(default)") + boolean java = false; + + @CommandLine.Option(names = { + "--kotlin" }, order = 9, description = "Generate Kotlin examples.") + boolean kotlin = false; + + @CommandLine.Option(names = { + "--scala" }, order = 10, description = "Generate Scala examples.") + boolean scala = false; + } + + @CommandLine.Parameters(arity = "0..1", paramLabel = "EXTENSION", description = "extension to add to project") + Set extensions; + + @Override + public Integer call() throws Exception { + try { + File projectDirectory = new File(System.getProperty("user.dir")); + + File projectRoot = new File(projectDirectory.getAbsoluteFile(), artifactId); + if (projectRoot.exists()) { + err().println("Unable to create the project, " + + "the directory " + projectRoot.getAbsolutePath() + " already exists"); + return CommandLine.ExitCode.SOFTWARE; + } + + SourceType sourceType = SourceType.JAVA; + if (targetBuildTool.gradleKotlinDsl) { + sourceType = SourceType.KOTLIN; + if (extensions == null) + extensions = new HashSet<>(); + if (!extensions.contains("kotlin")) + extensions.add("quarkus-kotlin"); + } else if (language.scala) { + sourceType = SourceType.SCALA; + if (extensions == null) + extensions = new HashSet<>(); + if (!extensions.contains("scala")) + extensions.add("quarkus-scala"); + } else if (language.kotlin) { + sourceType = SourceType.KOTLIN; + if (extensions == null) + extensions = new HashSet<>(); + if (!extensions.contains("kotlin")) + extensions.add("quarkus-kotlin"); + } else if (extensions != null && !extensions.isEmpty()) { + sourceType = CreateProject.determineSourceType(extensions); + } + + BuildTool buildTool = BuildTool.MAVEN; + if (targetBuildTool.gradle) + buildTool = BuildTool.GRADLE; + else if (targetBuildTool.gradleKotlinDsl) + buildTool = BuildTool.GRADLE_KOTLIN_DSL; + + final QuarkusProject project = QuarkusProjectHelper.getProject(projectRoot.getAbsoluteFile().toPath(), buildTool, + QuarkusCliVersion.version()); + boolean status = new CreateProject(project) + .groupId(groupId) + .artifactId(artifactId) + .version(version) + .sourceType(sourceType) + .overrideExamples(examples) + .extensions(extensions) + .noExamples(noExamples) + .execute().isSuccess(); + + if (status) { + out().println("Project " + artifactId + + " created."); + parent.setProjectDirectory(projectRoot.toPath().toAbsolutePath()); + } else { + err().println("Failed to create project"); + return CommandLine.ExitCode.SOFTWARE; + } + } catch (Exception e) { + err().println("Project creation failed, " + e.getMessage()); + if (parent.showErrors) + e.printStackTrace(err()); + return CommandLine.ExitCode.SOFTWARE; + } + + return CommandLine.ExitCode.OK; + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/CreateExtension.java b/devtools/cli/src/main/java/io/quarkus/cli/CreateExtension.java new file mode 100644 index 0000000000000..61a1d76635565 --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/CreateExtension.java @@ -0,0 +1,170 @@ +package io.quarkus.cli; + +import static io.quarkus.devtools.commands.CreateExtension.extractQuarkiverseExtensionId; +import static io.quarkus.devtools.commands.CreateExtension.isQuarkiverseGroupId; +import static io.quarkus.devtools.commands.CreateExtension.resolveModel; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isEmpty; + +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.concurrent.Callable; + +import org.apache.maven.model.Model; + +import 
io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.devtools.commands.data.QuarkusCommandException; +import io.quarkus.devtools.utils.Prompter; +import picocli.CommandLine; + +@CommandLine.Command(name = "create-extension", sortOptions = false, usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Creates the base of a Quarkus extension in different layout depending of the options and environment.") +public class CreateExtension extends BaseSubCommand implements Callable { + + @CommandLine.Option(names = { "-g", + "--group-id" }, order = 1, paramLabel = "GROUP-ID", description = "The groupId for project.") + String groupId; + + @CommandLine.Option(names = { "-i", + "--extension-id" }, order = 0, paramLabel = "EXTENSION-ID", description = "The extension id.") + String extensionId; + + @CommandLine.Option(names = { "-v", + "--version" }, order = 3, paramLabel = "VERSION", description = "The version for the extension.") + String version = "1.0.0-SNAPSHOT"; + + @CommandLine.Option(names = { "-q", + "--quarkus-version" }, order = 4, paramLabel = "QUARKUS-VERSION", description = "The quarkus version for the extension.") + String quarkusVersion; + + @CommandLine.Option(names = { "-N", + "--namespace-id" }, order = 5, paramLabel = "NAMESPACE-ID", description = "A prefix common to all extension modules artifactIds.") + String namespaceId; + + @CommandLine.Option(names = { "-p", + "--package-name" }, order = 6, paramLabel = "PACKAGE-NAME", description = "Base package under which classes should be created in Runtime and Deployment modules.. When specified, use a custom package name instead of auto generating it from other parameters.") + String packageName; + + @CommandLine.Option(names = { + "--extension-name" }, order = 7, paramLabel = "EXTENSION-NAME", description = "When specified, use a custom extension name instead of generating it from the extension id.") + String extensionName; + + @CommandLine.Option(names = { + "--namespace-name" }, order = 8, paramLabel = "NAMESPACE-NAME", description = "A prefix common to all extension modules names. 
When specified, use a custom namespace name instead of generating it from the namespace id.") + String namespaceName; + + @CommandLine.Option(names = { + "--bom-group-id" }, order = 9, paramLabel = "BOM-GROUP-ID", description = "The group id of the Quarkus platform BOM.") + String quarkusBomGroupId; + + @CommandLine.Option(names = { + "--bom-artifact-id" }, order = 10, paramLabel = "BOM-ARTIFACT-ID", description = "The artifact id of the Quarkus platform BOM.") + String quarkusBomArtifactId; + + @CommandLine.Option(names = { + "--bom-version" }, order = 11, paramLabel = "BOM-VERSION", description = "The version id of the Quarkus platform BOM.") + String quarkusBomVersion; + + @CommandLine.Option(names = { + "--without-unit-test" }, order = 12, description = "Indicates whether to generate a unit test class for the extension.") + boolean withoutUnitTest; + + @CommandLine.Option(names = { + "--without-it-tests" }, order = 13, description = "Indicates whether to generate a integration tests for the extension.") + boolean withoutIntegrationTests; + + @CommandLine.Option(names = { + "--without-devmode-test" }, order = 14, description = "Indicates whether to generate a dev mode test class for the extension.") + boolean withoutDevModeTest; + + @CommandLine.Option(names = { + "--without-tests" }, order = 15, description = "Indicates whether to generate any test for the extension.") + boolean withoutTests; + + @CommandLine.Spec + CommandLine.Model.CommandSpec spec; // + + @Override + public Integer call() throws Exception { + try { + Path dir = Paths.get(System.getProperty("user.dir")); + + promptValues(dir); + autoComputeQuarkiverseExtensionId(); + + final io.quarkus.devtools.commands.CreateExtension createExtension = new io.quarkus.devtools.commands.CreateExtension( + dir) + .extensionId(extensionId) + .extensionName(extensionName) + .groupId(groupId) + .version(version) + .packageName(packageName) + .namespaceId(namespaceId) + .namespaceName(namespaceName) + .quarkusVersion(quarkusVersion) + .quarkusBomGroupId(quarkusBomGroupId) + .quarkusBomArtifactId(quarkusBomArtifactId) + .quarkusBomGroupId(quarkusBomVersion) + .withoutUnitTest(withoutTests || withoutUnitTest) + .withoutDevModeTest(withoutTests || withoutDevModeTest) + .withoutIntegrationTests(withoutTests || withoutIntegrationTests); + + try { + boolean success = createExtension.execute().isSuccess(); + if (!success) { + err().println("Failed to generate Extension"); + return CommandLine.ExitCode.SOFTWARE; + } + } catch (QuarkusCommandException e) { + err().println("Failed to generate Extension, " + e.getMessage()); + if (parent.showErrors) { + e.printStackTrace(err()); + } + return CommandLine.ExitCode.SOFTWARE; + } + + } catch (Exception e) { + err().println("Failed to generate Extension, " + e.getMessage()); + if (parent.showErrors) { + e.printStackTrace(err()); + } + return CommandLine.ExitCode.SOFTWARE; + } + + return CommandLine.ExitCode.OK; + } + + private void autoComputeQuarkiverseExtensionId() { + if (isQuarkiverseGroupId(groupId) && isEmpty(extensionId)) { + extensionId = extractQuarkiverseExtensionId(groupId); + } + } + + private void promptValues(Path dir) throws QuarkusCommandException { + if (batchMode) { + if (isBlank(extensionId)) { + throw new CommandLine.MissingParameterException(spec.commandLine(), spec.optionsMap().get("-id"), + "Missing required option: '--extension-id=EXTENSION-ID'"); + } + return; + } + try { + final Prompter prompter = new Prompter(); + final Model model = resolveModel(dir); + if (model == null || 
!model.getArtifactId().endsWith("quarkus-parent")) { + if (isBlank(quarkusVersion)) { + quarkusVersion = prompter.prompt("Set the Quarkus version"); + } + if (isBlank(groupId)) { + groupId = prompter.promptWithDefaultValue("Set the extension groupId", "org.acme"); + } + } + autoComputeQuarkiverseExtensionId(); + if (isBlank(extensionId)) { + extensionId = prompter.prompt("Set the extension id"); + } + } catch (IOException e) { + throw new QuarkusCommandException("Unable to get user input", e); + } + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/CreateJBang.java b/devtools/cli/src/main/java/io/quarkus/cli/CreateJBang.java new file mode 100644 index 0000000000000..fd523a6a72a0a --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/CreateJBang.java @@ -0,0 +1,60 @@ +package io.quarkus.cli; + +import java.io.File; +import java.util.Set; +import java.util.concurrent.Callable; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.commands.CreateJBangProject; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import picocli.CommandLine; + +@CommandLine.Command(name = "create-jbang", sortOptions = false, usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Create a new quarkus jbang project.") +public class CreateJBang extends BaseSubCommand implements Callable { + + @CommandLine.Option(names = { "-n", + "--no-wrapper" }, order = 1, description = "Generate without JBang wrapper.") + boolean noJBangWrapper = false; + + @CommandLine.Option(names = { "-o", + "--output-folder" }, order = 2, paramLabel = "OUTPUT-FOLDER", description = "The output folder for project") + String outputFolder = "jbang-with-quarkus"; + + @CommandLine.Parameters(arity = "0..1", paramLabel = "EXTENSION", description = "Extensions to add to project") + Set extensions; + + @Override + public Integer call() throws Exception { + try { + File projectDirectory = new File(System.getProperty("user.dir")); + + File projectRoot = new File(projectDirectory.getAbsoluteFile(), outputFolder); + if (projectRoot.exists()) { + err().println("Unable to create the project, " + + "the directory " + projectRoot.getAbsolutePath() + " already exists"); + return CommandLine.ExitCode.SOFTWARE; + } + + boolean status = new CreateJBangProject( + QuarkusProjectHelper.getProject(projectRoot.getAbsoluteFile().toPath(), QuarkusCliVersion.version())) + .extensions(extensions) + .setValue("noJBangWrapper", noJBangWrapper) + .execute() + .isSuccess(); + if (status) { + out().println("JBang project created."); + parent.setProjectDirectory(projectRoot.toPath().toAbsolutePath()); + } else { + err().println("Failed to create JBang project"); + return CommandLine.ExitCode.SOFTWARE; + } + } catch (Exception e) { + err().println("JBang project creation failed, " + e.getMessage()); + if (parent.showErrors) + e.printStackTrace(err()); + return CommandLine.ExitCode.SOFTWARE; + } + return CommandLine.ExitCode.OK; + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Dev.java b/devtools/cli/src/main/java/io/quarkus/cli/Dev.java new file mode 100644 index 0000000000000..76f8c9a248ecd --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/Dev.java @@ -0,0 +1,28 @@ +package io.quarkus.cli; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.devtools.project.BuildTool; +import picocli.CommandLine; + 
+@CommandLine.Command(name = "dev", usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Execute project in live coding dev mode") +public class Dev extends BaseSubCommand implements BuildsystemCommand { + + @Override + public boolean aggregate(BuildTool buildtool) { + return true; + } + + @Override + public List getArguments(Path projectDir, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) { + return Collections.singletonList("quarkus:dev"); + } else { + return Collections.singletonList("quarkusDev"); + } + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/List.java b/devtools/cli/src/main/java/io/quarkus/cli/List.java new file mode 100644 index 0000000000000..40e7f09187c6d --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/List.java @@ -0,0 +1,111 @@ +package io.quarkus.cli; + +import java.nio.file.Path; +import java.util.ArrayList; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.commands.ListExtensions; +import io.quarkus.devtools.commands.data.QuarkusCommandException; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import picocli.CommandLine; + +@CommandLine.Command(name = "list", usageHelpAutoWidth = true, sortOptions = false, mixinStandardHelpOptions = false, description = "List installed (default) or installable extensions.") +public class List extends BaseSubCommand implements BuildsystemCommand { + + @CommandLine.Option(names = { "-i", + "--installable" }, defaultValue = "false", order = 2, description = "Display installable extensions.") + boolean installable = false; + + @CommandLine.Option(names = { "-s", + "--search" }, defaultValue = "*", paramLabel = "PATTERN", order = 3, description = "Search filter on extension list. The format is based on Java Pattern.") + String searchPattern; + + @CommandLine.ArgGroup() + ExtensionFormat format = new ExtensionFormat(); + + static class ExtensionFormat { + @CommandLine.Option(names = { "--name" }, order = 4, description = "Display extension name only. 
(default)") + boolean name = false; + + @CommandLine.Option(names = { "--concise" }, order = 5, description = "Display extension name and description.") + boolean concise = false; + + @CommandLine.Option(names = { + "--full" }, order = 6, description = "Display concise format and version related columns.") + boolean full = false; + + @CommandLine.Option(names = { + "--origins" }, order = 7, description = "Display extensions including their platform origins.") + boolean origins = false; + + } + + @Override + public boolean aggregate(BuildTool buildtool) { + return buildtool != BuildTool.MAVEN; + } + + @Override + public int execute(Path projectDirectory, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) { + return listExtensionsMaven(projectDirectory); + } else { + throw new IllegalStateException("Should be unreachable"); + } + } + + private String getFormatString() { + String formatString = "name"; + if (format.concise) + formatString = "concise"; + else if (format.full) + formatString = "full"; + else if (format.origins) + formatString = "origins"; + return formatString; + } + + @Override + public java.util.List getArguments(Path projectDir, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) + throw new IllegalStateException("Should be unreachable"); + ArrayList args = new ArrayList<>(); + args.add("listExtensions"); + args.add("--fromCli"); + args.add("--format=" + getFormatString()); + if (!installable) + args.add("--installed"); + if (searchPattern != null) + args.add("--searchPattern=" + searchPattern); + + return args; + } + + private Integer listExtensionsMaven(Path projectDirectory) { + // we do not have to spawn process for maven + try { + + new ListExtensions(QuarkusProjectHelper.getProject(projectDirectory, QuarkusCliVersion.version())) + .fromCli(true) + .all(false) + .installed(!installable) + .format(getFormatString()) + .search(searchPattern) + .execute(); + } catch (QuarkusCommandException e) { + if (parent.showErrors) + e.printStackTrace(err()); + return CommandLine.ExitCode.SOFTWARE; + } catch (IllegalStateException e) { + if (parent.showErrors) + e.printStackTrace(err()); + err().println("No project exists to list extensions from."); + return CommandLine.ExitCode.SOFTWARE; + } + return CommandLine.ExitCode.OK; + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Platforms.java b/devtools/cli/src/main/java/io/quarkus/cli/Platforms.java new file mode 100644 index 0000000000000..26d056ab48bfb --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/Platforms.java @@ -0,0 +1,32 @@ +package io.quarkus.cli; + +import java.nio.file.Path; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.commands.ListPlatforms; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import picocli.CommandLine; + +@CommandLine.Command(name = "platforms", usageHelpAutoWidth = true, sortOptions = false, mixinStandardHelpOptions = false, description = "List imported (default) or all available Quarkus platforms.") +public class Platforms extends BaseSubCommand implements BuildsystemCommand { + + @Override + public int execute(Path projectDirectory, BuildTool buildTool) { + try { + new ListPlatforms( + QuarkusProjectHelper.getProject(projectDirectory, buildTool == null ? 
BuildTool.MAVEN : buildTool, + QuarkusCliVersion.version())) + .execute(); + } catch (Exception e) { + if (parent.showErrors) { + e.printStackTrace(err()); + } + return CommandLine.ExitCode.SOFTWARE; + } + return CommandLine.ExitCode.OK; + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java b/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java new file mode 100644 index 0000000000000..83338f6773402 --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java @@ -0,0 +1,184 @@ +package io.quarkus.cli; + +import java.io.PrintWriter; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.LinkedList; +import java.util.concurrent.Callable; + +import javax.inject.Inject; + +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.cli.core.ExecuteUtil; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.runtime.QuarkusApplication; +import io.quarkus.runtime.annotations.QuarkusMain; +import picocli.CommandLine; + +@QuarkusMain +@CommandLine.Command(name = "quarkus", aliases = { + "qs" }, versionProvider = QuarkusCliVersion.class, usageHelpAutoWidth = true, subcommandsRepeatable = true, mixinStandardHelpOptions = true, subcommands = { + Build.class, + Clean.class, Create.class, CreateJBang.class, List.class, Platforms.class, Add.class, Remove.class, Dev.class, + CreateExtension.class }) +public class QuarkusCli implements QuarkusApplication { + + public void usage() { + CommandLine.usage(this, System.out); + + } + + @CommandLine.Option(names = { "-e", "--errors" }, description = "Produce execution error messages.") + boolean showErrors; + + @CommandLine.Option(names = { "--verbose" }, description = "Verbose mode.") + boolean verbose; + + @CommandLine.Option(names = { "--manual-output" }, hidden = true, description = "For unit test purposes.") + boolean manualOutput; + + @CommandLine.Spec + protected CommandLine.Model.CommandSpec spec; + + public PrintWriter out() { + return spec.commandLine().getOut(); + } + + public PrintWriter err() { + return spec.commandLine().getErr(); + } + + public boolean isVerbose() { + return verbose; + } + + public boolean isShowErrors() { + return showErrors; + } + + public boolean isManualOutput() { + return manualOutput; + } + + @Inject + CommandLine.IFactory factory; + + private Path projectDirectory; + + public Path getProjectDirectory() { + if (projectDirectory == null) { + projectDirectory = Paths.get(System.getProperty("user.dir")).toAbsolutePath(); + } + return projectDirectory; + } + + public void setProjectDirectory(Path projectDirectory) { + this.projectDirectory = projectDirectory; + } + + @Override + public int run(String... args) throws Exception { + CommandLine cmd = factory == null ? 
new CommandLine(this) : new CommandLine(this, factory); + return cmd.setExecutionStrategy(new ExecutionStrategy()) + .setUsageHelpLongOptionsMaxWidth(30) + .execute(args); + } + + public BuildTool resolveBuildTool(Path projectDirectory) { + return QuarkusProject.resolveExistingProjectBuildTool(projectDirectory.toAbsolutePath()); + } + + public int executeBuildsystem(BuildTool buildtool, java.util.List args) throws Exception { + String[] newArgs = args.toArray(new String[args.size()]); + if (buildtool == BuildTool.MAVEN) { + return ExecuteUtil.executeMavenTarget(getProjectDirectory().toFile(), this, newArgs); + } else { + return ExecuteUtil.executeGradleTarget(getProjectDirectory().toFile(), this, newArgs); + } + } + + class ExecutionStrategy implements CommandLine.IExecutionStrategy { + @Override + public int execute(CommandLine.ParseResult parseResult) + throws CommandLine.ExecutionException, CommandLine.ParameterException { + try { + // Aggregate any maven/gradle commands into one build process + // if BuildsystemCommand.aggregate() returns true + LinkedList buildsystemArguments = new LinkedList<>(); + BuildTool buildtool = null; + if (parseResult.hasSubcommand()) { + for (CommandLine.ParseResult pr : parseResult.subcommands()) { + if (CommandLine.printHelpIfRequested(pr)) { + return CommandLine.ExitCode.USAGE; + } + // TODO: More recursion will be needed if any subcommand has sub-sub commands + for (CommandLine cl : pr.asCommandLineList()) { + try { + Object cmd = cl.getCommand(); + if (cmd instanceof BuildsystemCommand) { + if (buildtool == null) + buildtool = resolveBuildTool(getProjectDirectory()); + BuildsystemCommand build = (BuildsystemCommand) cmd; + if (build.aggregate(buildtool)) { + buildsystemArguments.addAll(build.getArguments(getProjectDirectory(), buildtool)); + } else { + if (buildsystemArguments.size() > 0) { + int exitCode = executeBuildsystem(buildtool, buildsystemArguments); + if (exitCode != CommandLine.ExitCode.OK) + return exitCode; + buildsystemArguments.clear(); + } + + int exitCode = build.execute(getProjectDirectory(), buildtool); + if (exitCode != CommandLine.ExitCode.OK) + return exitCode; + } + } else if (cmd instanceof Callable) { + int exitCode = ((Callable) cmd).call(); + if (exitCode != CommandLine.ExitCode.OK) + return exitCode; + } else { + err().println("Unknown execution type for : " + cl.getCommandName()); + return CommandLine.ExitCode.SOFTWARE; + } + } catch (Exception e) { + if (showErrors) + e.printStackTrace(err()); + err().println("Failure executing command : " + cl.getCommandName()); + return CommandLine.ExitCode.SOFTWARE; + } + } + } + if (buildsystemArguments.size() > 0) { + try { + return executeBuildsystem(buildtool, buildsystemArguments); + } catch (Exception e) { + if (showErrors) + e.printStackTrace(err()); + err().print("Failure executing build command : "); + ExecuteUtil.outputBuildCommand(err(), buildtool, buildsystemArguments); + err().println(); + return CommandLine.ExitCode.SOFTWARE; + } + } + } else { + if (parseResult.isVersionHelpRequested()) { + parseResult.asCommandLineList().iterator().next().printVersionHelp(out()); + } else { + // no subcommands executed + usage(); + } + } + return CommandLine.ExitCode.OK; + } catch (Exception e) { + if (showErrors) + e.printStackTrace(err()); + return CommandLine.ExitCode.SOFTWARE; + } + } + + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/Remove.java b/devtools/cli/src/main/java/io/quarkus/cli/Remove.java new file mode 100644 index 0000000000000..b8f1527fddef0 --- 
/dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/Remove.java @@ -0,0 +1,64 @@ +package io.quarkus.cli; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import io.quarkus.cli.core.BaseSubCommand; +import io.quarkus.cli.core.BuildsystemCommand; +import io.quarkus.cli.core.QuarkusCliVersion; +import io.quarkus.devtools.commands.RemoveExtensions; +import io.quarkus.devtools.commands.data.QuarkusCommandOutcome; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import picocli.CommandLine; + +@CommandLine.Command(name = "remove", aliases = "rm", usageHelpAutoWidth = true, mixinStandardHelpOptions = false, description = "Remove an extension from this project.") +public class Remove extends BaseSubCommand implements BuildsystemCommand { + + @CommandLine.Parameters(arity = "1", paramLabel = "EXTENSION", description = "extensions to remove") + Set extensions; + + @Override + public boolean aggregate(BuildTool buildtool) { + return buildtool != BuildTool.MAVEN; + } + + @Override + public List getArguments(Path projectDir, BuildTool buildtool) { + if (buildtool == BuildTool.MAVEN) + throw new IllegalStateException("Should be unreachable"); + + ArrayList args = new ArrayList<>(); + args.add("removeExtension"); + String param = "--extensions=" + String.join(",", extensions); + args.add(param); + return args; + } + + @Override + public int execute(Path projectDir, BuildTool buildtool) throws Exception { + if (buildtool == BuildTool.MAVEN) { + return removeMaven(projectDir); + } else { + throw new IllegalStateException("Should be unreachable"); + } + } + + private Integer removeMaven(Path projectDirectory) { + try { + RemoveExtensions project = new RemoveExtensions( + QuarkusProjectHelper.getProject(projectDirectory, QuarkusCliVersion.version())) + .extensions(extensions); + QuarkusCommandOutcome result = project.execute(); + return result.isSuccess() ? 
CommandLine.ExitCode.OK : CommandLine.ExitCode.SOFTWARE; + } catch (Exception e) { + if (parent.showErrors) + e.printStackTrace(err()); + err().println("Unable to remove extension matching:" + e.getMessage()); + return CommandLine.ExitCode.SOFTWARE; + } + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/core/BaseSubCommand.java b/devtools/cli/src/main/java/io/quarkus/cli/core/BaseSubCommand.java new file mode 100644 index 0000000000000..cacc884cb8852 --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/core/BaseSubCommand.java @@ -0,0 +1,30 @@ +package io.quarkus.cli.core; + +import java.io.PrintWriter; + +import io.quarkus.cli.QuarkusCli; +import picocli.CommandLine; + +public abstract class BaseSubCommand { + + @CommandLine.Option(names = { "-B", "--batch-mode" }, order = 100, description = "run command in batch mode") + protected boolean batchMode; + + @CommandLine.Option(names = { "-h", "--help" }, order = 101, usageHelp = true, description = "display this help message") + protected boolean help; + + @CommandLine.ParentCommand + protected QuarkusCli parent; + + @CommandLine.Spec + protected CommandLine.Model.CommandSpec spec; + + public PrintWriter out() { + return spec.commandLine().getOut(); + } + + public PrintWriter err() { + return spec.commandLine().getErr(); + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/core/BuildsystemCommand.java b/devtools/cli/src/main/java/io/quarkus/cli/core/BuildsystemCommand.java new file mode 100644 index 0000000000000..3f553449b9d96 --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/core/BuildsystemCommand.java @@ -0,0 +1,43 @@ +package io.quarkus.cli.core; + +import java.nio.file.Path; +import java.util.List; + +import io.quarkus.devtools.project.BuildTool; + +/** + * A buildsystem command that can only be run solo + */ +public interface BuildsystemCommand { + /** + * Should this buildsystem command be run within one build process? + * + * @param buildtool + * @return + */ + default boolean aggregate(BuildTool buildtool) { + return false; + } + + /** + * If aggregate() then this method should be called. 
+ * + * @param projectDir + * @param buildtool + * @return list of arguments to pass to build process + */ + default List getArguments(Path projectDir, BuildTool buildtool) { + throw new IllegalStateException("Command implemented incorrectly"); + } + + /** + * This command is responsible for executing buildsystem if aggregate() is false + * + * @param projectDir + * @param buildtool + * @return + */ + default int execute(Path projectDir, BuildTool buildtool) throws Exception { + throw new IllegalStateException("Command implemented incorrectly"); + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/core/ExecuteUtil.java b/devtools/cli/src/main/java/io/quarkus/cli/core/ExecuteUtil.java new file mode 100644 index 0000000000000..eb1f618b9b413 --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/core/ExecuteUtil.java @@ -0,0 +1,224 @@ +package io.quarkus.cli.core; + +import static picocli.CommandLine.ExitCode.OK; +import static picocli.CommandLine.ExitCode.SOFTWARE; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Optional; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import io.quarkus.cli.QuarkusCli; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.registry.config.RegistriesConfigLocator; +import io.quarkus.utilities.OS; +import picocli.CommandLine; + +public class ExecuteUtil { + + private static int executeGradle(File projectDirectory, QuarkusCli cli, String... args) + throws Exception { + File gradle = findExecutableFile("gradle"); + + if (gradle == null) { + cli.err().println("Unable to find the gradle executable, is it in your path?"); + return SOFTWARE; + } else { + String[] newArgs = prependArray(gradle.getAbsolutePath(), args); + return executeProcess(cli, newArgs, projectDirectory); + } + } + + public static String[] prependArray(String prepend, String[] args) { + String[] newArgs = new String[1 + args.length]; + newArgs[0] = prepend; + for (int i = 1; i < newArgs.length; i++) + newArgs[i] = args[i - 1]; + return newArgs; + } + + public static File findExecutableFile(String base) { + String path = null; + String executable = base; + + if (OS.determineOS() == OS.WINDOWS) { + executable = base + ".cmd"; + ; + path = findExecutable(executable); + if (path == null) { + executable = base + ".bat"; + path = findExecutable(executable); + } + } else { + executable = base; + path = findExecutable(executable); + } + if (path == null) + return null; + return new File(path, executable); + } + + private static int executeMaven(File projectDirectory, QuarkusCli cli, String... args) throws Exception { + File mvn = findExecutableFile("mvn"); + if (mvn == null) { + cli.err().println("Unable to find the maven executable, is it in your path?"); + return CommandLine.ExitCode.SOFTWARE; + } + + String[] newArgs = prependArray(mvn.getAbsolutePath(), args); + return executeProcess(cli, newArgs, projectDirectory); + } + + private static String findExecutable(String exec) { + Optional mvnPath = Stream.of(System.getenv("PATH").split(Pattern.quote(File.pathSeparator))) + .map(Paths::get) + .filter(path -> Files.exists(path.resolve(exec))).findFirst(); + + return mvnPath.map(value -> value.getParent().toString()).orElse(null); + } + + private static int executeWrapper(File wrapper, QuarkusCli cli, String... 
args) throws Exception { + String[] newArgs = prependArray(wrapper.getAbsolutePath(), args); + + File parentDir = wrapper.getParentFile(); + return executeProcess(cli, newArgs, parentDir); + } + + public static int executeProcess(QuarkusCli cli, String[] args, File parentDir) throws IOException, InterruptedException { + if (cli.isVerbose()) { + cli.out().println(String.join(" ", args)); + cli.out().println(); + } + + int exit = SOFTWARE; + if (cli.isManualOutput()) { + // manual output is for unit testing the cli + Process process = new ProcessBuilder() + .command(args) + .redirectErrorStream(true) + .directory(parentDir) + .start(); + InputStream stdIn = process.getInputStream(); + InputStreamReader isr = new InputStreamReader(stdIn); + BufferedReader br = new BufferedReader(isr); + + String line = null; + + while ((line = br.readLine()) != null) + cli.out().println(line); + + exit = process.waitFor(); + + } else { + Process process = new ProcessBuilder() + .command(args) + .inheritIO() + .directory(parentDir) + .start(); + + exit = process.waitFor(); + } + if (exit != 0) { + return SOFTWARE; + } else { + return OK; + } + } + + private static File getGradleWrapper(String projectPath) { + if (OS.determineOS() == OS.WINDOWS) { + File wrapper = new File(projectPath + File.separator + "gradlew.cmd"); + if (wrapper.isFile()) + return wrapper; + wrapper = new File(projectPath + File.separator + "gradlew.bat"); + if (wrapper.isFile()) + return wrapper; + } else { + File wrapper = new File(projectPath + File.separator + "gradlew"); + if (wrapper.isFile()) + return wrapper; + } + + return null; + } + + private static File getMavenWrapper(String projectPath) { + if (OS.determineOS() == OS.WINDOWS) { + File wrapper = new File(projectPath + File.separator + "mvnw.cmd"); + if (wrapper.isFile()) + return wrapper; + wrapper = new File(projectPath + File.separator + "mvnw.bat"); + if (wrapper.isFile()) + return wrapper; + } else { + File wrapper = new File(projectPath + File.separator + "mvnw"); + if (wrapper.isFile()) + return wrapper; + } + + return null; + } + + public static int executeGradleTarget(File projectPath, QuarkusCli cli, String... args) throws Exception { + File buildFile = projectPath.toPath().resolve("build.gradle").toFile(); + if (!buildFile.isFile()) { + cli.err().println("Was not able to find a build file in: " + projectPath); + return SOFTWARE; + } + String[] newArgs = args; + newArgs = propagatePropertyIfSet("maven.repo.local", newArgs); + newArgs = propagatePropertyIfSet(RegistriesConfigLocator.CONFIG_FILE_PATH_PROPERTY, newArgs); + newArgs = propagatePropertyIfSet("io.quarkus.maven.secondary-local-repo", newArgs); + if (cli.isShowErrors()) { + newArgs = prependArray("--full-stacktrace", newArgs); + } + if (CommandLine.Help.Ansi.AUTO.enabled()) + newArgs = prependArray("--console=rich", newArgs); + File wrapper = getGradleWrapper(projectPath.getAbsolutePath()); + if (wrapper != null) { + return executeWrapper(wrapper, cli, newArgs); + } else { + return executeGradle(projectPath, cli, args); + } + } + + private static String[] propagatePropertyIfSet(String name, String[] newArgs) { + final String value = System.getProperty(name); + return value == null ? newArgs : prependArray("-D" + name + "=" + value, newArgs); + } + + public static int executeMavenTarget(File projectPath, QuarkusCli cli, String... 
args) throws Exception { + File buildFile = projectPath.toPath().resolve("pom.xml").toFile(); + if (!buildFile.isFile()) { + cli.err().println("Was not able to find a build file in: " + projectPath); + return SOFTWARE; + } + File wrapper = getMavenWrapper(projectPath.getAbsolutePath()); + String[] newArgs = args; + if (cli.isShowErrors()) { + newArgs = prependArray("-e", newArgs); + } + if (CommandLine.Help.Ansi.AUTO.enabled()) + newArgs = prependArray("-Dstyle.color=always", newArgs); + if (wrapper != null) { + executeWrapper(wrapper, cli, newArgs); + } else { + return executeMaven(projectPath, cli, newArgs); + } + return CommandLine.ExitCode.OK; + } + + public static void outputBuildCommand(PrintWriter writer, BuildTool buildtool, java.util.List args) { + writer.print(buildtool == BuildTool.MAVEN ? "mvn" : "gradle"); + for (String arg : args) + writer.print(" " + arg); + } +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/core/QuarkusCliVersion.java b/devtools/cli/src/main/java/io/quarkus/cli/core/QuarkusCliVersion.java new file mode 100644 index 0000000000000..60ad015fa716c --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/core/QuarkusCliVersion.java @@ -0,0 +1,44 @@ +package io.quarkus.cli.core; + +import java.io.BufferedReader; +import java.io.IOException; +import java.net.URL; +import java.nio.file.Files; +import java.util.Properties; + +import io.smallrye.common.classloader.ClassPathUtils; +import picocli.CommandLine; + +public class QuarkusCliVersion implements CommandLine.IVersionProvider { + + private static String version; + + public static String version() { + if (version != null) { + return version; + } + final URL quarkusPropsUrl = Thread.currentThread().getContextClassLoader().getResource("quarkus.properties"); + if (quarkusPropsUrl == null) { + throw new RuntimeException("Failed to locate quarkus.properties on the classpath"); + } + final Properties props = new Properties(); + ClassPathUtils.consumeAsPath(quarkusPropsUrl, p -> { + try (BufferedReader reader = Files.newBufferedReader(p)) { + props.load(reader); + } catch (IOException e) { + throw new RuntimeException("Failed to load quarkus.properties", e); + } + }); + version = props.getProperty("quarkus-core-version"); + if (version == null) { + throw new RuntimeException("Failed to locate quarkus-core-version property in the bundled quarkus.properties"); + } + return version; + } + + @Override + public String[] getVersion() throws Exception { + return new String[] { version() }; + } + +} diff --git a/devtools/cli/src/main/java/io/quarkus/cli/core/QuarkusVersion.java b/devtools/cli/src/main/java/io/quarkus/cli/core/QuarkusVersion.java new file mode 100644 index 0000000000000..e3ebfbc3c6dfb --- /dev/null +++ b/devtools/cli/src/main/java/io/quarkus/cli/core/QuarkusVersion.java @@ -0,0 +1,15 @@ +package io.quarkus.cli.core; + +import java.nio.file.Paths; + +import io.quarkus.devtools.project.QuarkusProjectHelper; +import picocli.CommandLine; + +public class QuarkusVersion implements CommandLine.IVersionProvider { + + @Override + public String[] getVersion() throws Exception { + return new String[] { QuarkusProjectHelper.getProject(Paths.get("").normalize().toAbsolutePath()).getExtensionsCatalog() + .getQuarkusCoreVersion() }; + } +} diff --git a/devtools/cli/src/main/resources/application.properties b/devtools/cli/src/main/resources/application.properties new file mode 100644 index 0000000000000..da09c4ff8e2c2 --- /dev/null +++ b/devtools/cli/src/main/resources/application.properties @@ -0,0 +1,7 @@ 
+quarkus.log.level=WARN +quarkus.banner.enabled=false +quarkus.package.type=uber-jar +quarkus.native.additional-build-args=--allow-incomplete-classpath +#quarkus.native.auto-service-loader-registration=true +#quarkus.native.resources.includes=quarkus.properties,bundled-codestarts/**,codestarts/** +#quarkus.native.native-image-xmx=6g \ No newline at end of file diff --git a/devtools/cli/src/test/java/io/quarkus/cli/CliTest.java b/devtools/cli/src/test/java/io/quarkus/cli/CliTest.java new file mode 100644 index 0000000000000..7cd48eaa1fee4 --- /dev/null +++ b/devtools/cli/src/test/java/io/quarkus/cli/CliTest.java @@ -0,0 +1,376 @@ +package io.quarkus.cli; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Comparator; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import io.quarkus.cli.core.ExecuteUtil; +import io.quarkus.devtools.test.RegistryClientTestHelper; +import picocli.CommandLine; + +public class CliTest { + + private String screen; + private int exitCode; + + private String cwd; + private Path workspace; + + @BeforeAll + public static void globalSetup() { + RegistryClientTestHelper.enableRegistryClientTestConfig(); + } + + @AfterAll + public static void globalReset() { + RegistryClientTestHelper.disableRegistryClientTestConfig(); + } + + @BeforeEach + public void setupTestDirectories() throws Exception { + cwd = System.getProperty("user.dir"); + workspace = Paths.get(cwd).toAbsolutePath().resolve("target/cli-workspace"); + deleteDir(workspace); + Assertions.assertFalse(workspace.toFile().exists()); + Files.createDirectories(workspace); + System.setProperty("user.dir", workspace.toFile().getAbsolutePath()); + } + + @AfterEach + public void revertCWD() { + System.setProperty("user.dir", cwd); + } + + @Test + public void testCreateMavenDefaults() throws Exception { + Path project = workspace.resolve("code-with-quarkus"); + + execute("create"); + + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("Project code-with-quarkus created")); + + Assertions.assertTrue(project.resolve("pom.xml").toFile().exists()); + String pom = readString(project.resolve("pom.xml")); + Assertions.assertTrue(pom.contains("org.acme")); + Assertions.assertTrue(pom.contains("code-with-quarkus")); + Assertions.assertTrue(pom.contains("1.0.0-SNAPSHOT")); + } + + @Test + public void testAddListRemove() throws Exception { + Path project = workspace.resolve("code-with-quarkus"); + testCreateMavenDefaults(); + + execute("add"); + Assertions.assertEquals(CommandLine.ExitCode.USAGE, exitCode); + + // test not project dir + execute("add", "qute"); + Assertions.assertEquals(CommandLine.ExitCode.SOFTWARE, exitCode); + + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.SOFTWARE, exitCode); + + System.out.println("Change user.dir to: " + project.toFile().getAbsolutePath()); + System.setProperty("user.dir", project.toFile().getAbsolutePath()); + + // test empty list + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertEquals("quarkus-resteasy", screen.trim()); + + // test add + execute("add", "qute"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + // test list 
+ + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("quarkus-resteasy")); + Assertions.assertTrue(screen.contains("quarkus-qute")); + + // test add multiple + execute("add", "amazon-lambda-http", "jackson"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + // test list + + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("quarkus-resteasy")); + Assertions.assertTrue(screen.contains("quarkus-qute")); + Assertions.assertTrue(screen.contains("quarkus-amazon-lambda-http")); + Assertions.assertTrue(screen.contains("quarkus-jackson")); + + // test list installable + execute("list", "--installable"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertFalse(screen.contains("quarkus-jackson")); + Assertions.assertTrue(screen.contains("quarkus-azure-functions-http")); + + // test list search installable + execute("list", "--installable", "--search=picocli"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertEquals("quarkus-picocli", screen.trim()); + + // test list search + execute("list", "--search=amazon"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertEquals("quarkus-amazon-lambda-http", screen.trim()); + + // test remove bad + execute("remove", "badbadbad"); + Assertions.assertEquals(CommandLine.ExitCode.SOFTWARE, exitCode); + + // test remove + execute("remove", "qute"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertFalse(screen.contains("quarkus-qute")); + Assertions.assertTrue(screen.contains("quarkus-amazon-lambda-http")); + Assertions.assertTrue(screen.contains("quarkus-jackson")); + + // test remove many + execute("rm", "amazon-lambda-http", "jackson", "resteasy"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertEquals("", screen.trim()); + + } + + @Test + public void testGradleAddListRemove() throws Exception { + // Gradle list command cannot be screen captured with the current implementation + // so I will just test good return values + // + Path project = workspace.resolve("code-with-quarkus"); + testCreateGradleDefaults(); + + execute("add"); + Assertions.assertEquals(CommandLine.ExitCode.USAGE, exitCode); + + // test not project dir + execute("add", "resteasy"); + Assertions.assertEquals(CommandLine.ExitCode.SOFTWARE, exitCode); + + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.SOFTWARE, exitCode); + + System.out.println("Change user.dir to: " + project.toFile().getAbsolutePath()); + System.setProperty("user.dir", project.toFile().getAbsolutePath()); + + // test empty list + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + // test add + execute("add", "resteasy"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + // test list + + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("quarkus-resteasy")); + Assertions.assertFalse(screen.contains("quarkus-amazon-lambda-http")); + + // test add multiple + execute("add", "amazon-lambda-http", "jackson"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + // test list + + execute("list"); + 
Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("quarkus-resteasy")); + Assertions.assertTrue(screen.contains("quarkus-amazon-lambda-http")); + Assertions.assertTrue(screen.contains("quarkus-jackson")); + + // test list installable + execute("list", "--installable"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertFalse(screen.contains("quarkus-jackson")); + Assertions.assertTrue(screen.contains("quarkus-azure-functions-http")); + + // test list search installable + execute("list", "--installable", "--search=picocli"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("quarkus-picocli")); + Assertions.assertFalse(screen.contains("quarkus-amazon-lambda-http")); + + // test list search + execute("list", "--search=amazon"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertFalse(screen.contains("quarkus-picocli")); + Assertions.assertTrue(screen.contains("quarkus-amazon-lambda-http")); + + // test remove + execute("remove", "resteasy"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertFalse(screen.contains("quarkus-resteasy")); + Assertions.assertTrue(screen.contains("quarkus-amazon-lambda-http")); + Assertions.assertTrue(screen.contains("quarkus-jackson")); + + // test remove many + execute("rm", "amazon-lambda-http", "jackson"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + execute("list"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertFalse(screen.contains("quarkus-resteasy")); + Assertions.assertFalse(screen.contains("quarkus-amazon-lambda-http")); + Assertions.assertFalse(screen.contains("quarkus-jackson")); + + } + + @Test + public void testCreateMaven() throws Exception { + Path project = workspace.resolve("my-rest-project"); + + execute("create", + "--group-id=com.acme", + "--artifact-id=my-rest-project", + "--version=4.2", + "--maven", + "resteasy"); + + Assertions.assertEquals(0, exitCode); + Assertions.assertTrue(screen.contains("Project my-rest-project created")); + + Assertions.assertTrue(project.resolve("pom.xml").toFile().exists()); + String pom = readString(project.resolve("pom.xml")); + Assertions.assertTrue(pom.contains("com.acme")); + Assertions.assertTrue(pom.contains("my-rest-project")); + Assertions.assertTrue(pom.contains("4.2")); + Assertions.assertTrue(pom.contains("quarkus-resteasy")); + } + + @Test + public void testCreateGradleDefaults() throws Exception { + Path project = workspace.resolve("code-with-quarkus"); + + execute("create", "--gradle"); + + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + Assertions.assertTrue(screen.contains("Project code-with-quarkus created")); + + Assertions.assertTrue(project.resolve("build.gradle").toFile().exists()); + String pom = readString(project.resolve("build.gradle")); + Assertions.assertTrue(pom.contains("group 'org.acme'")); + Assertions.assertTrue(pom.contains("version '1.0.0-SNAPSHOT'")); + Assertions.assertTrue(project.resolve("settings.gradle").toFile().exists()); + String settings = readString(project.resolve("settings.gradle")); + Assertions.assertTrue(settings.contains("code-with-quarkus")); + + } + + @Test + public void testGradleBuild() throws Exception { + + execute("create", "--gradle", "resteasy"); + + Path project = workspace.resolve("code-with-quarkus"); + 
System.setProperty("user.dir", project.toFile().getAbsolutePath()); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + execute("build"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + execute("clean"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + } + + @Test + public void testMavenBuild() throws Exception { + + execute("create", "resteasy"); + + Path project = workspace.resolve("code-with-quarkus"); + System.setProperty("user.dir", project.toFile().getAbsolutePath()); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + execute("build"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + execute("clean"); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + + } + + @Test + public void testCreateJBangRestEasy() throws Exception { + + execute("create-jbang", "--output-folder=my-jbang-project", "resteasy-jsonb"); + + Path project = workspace.resolve("my-jbang-project"); + System.setProperty("user.dir", project.toFile().getAbsolutePath()); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + } + + @Test + public void testCreateJBangPicocli() throws Exception { + + execute("create-jbang", "--output-folder=my-jbang-project", "quarkus-picocli"); + + Path project = workspace.resolve("my-jbang-project"); + System.setProperty("user.dir", project.toFile().getAbsolutePath()); + Assertions.assertEquals(CommandLine.ExitCode.OK, exitCode); + } + + private void deleteDir(Path path) throws Exception { + if (!path.toFile().exists()) + return; + Files.walk(path) + .sorted(Comparator.reverseOrder()) + .map(Path::toFile) + .forEach(File::delete); + } + + private void execute(String... args) throws Exception { + System.out.print("$ quarkus"); + args = ExecuteUtil.prependArray("-e", args); + args = ExecuteUtil.prependArray("--verbose", args); + args = ExecuteUtil.prependArray("--manual-output", args); + for (String arg : args) + System.out.print(" " + arg); + System.out.println(); + PrintStream stdout = System.out; + PrintStream stderr = System.err; + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + PrintStream ps = new PrintStream(baos); + System.setOut(ps); + System.setErr(ps); + + QuarkusCli cli = new QuarkusCli(); + try { + exitCode = cli.run(args); + ps.flush(); + } finally { + System.setOut(stdout); + System.setErr(stderr); + } + screen = baos.toString(); + System.out.println(screen); + } + + private String readString(Path path) throws Exception { + return new String(Files.readAllBytes(path)); + } +} diff --git a/devtools/gradle/.gitignore b/devtools/gradle/.gitignore new file mode 100644 index 0000000000000..b74cd93b4902c --- /dev/null +++ b/devtools/gradle/.gitignore @@ -0,0 +1,20 @@ +# Created by https://www.toptal.com/developers/gitignore/api/gradle +# Edit at https://www.toptal.com/developers/gitignore?templates=gradle + +### Gradle ### +.gradle +build/ + +# Ignore Gradle GUI config +gradle-app.setting + +# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) +!gradle-wrapper.jar + +# Cache of project +.gradletasknamecache + +### Gradle Patch ### +**/build/ + +# End of https://www.toptal.com/developers/gitignore/api/gradle diff --git a/devtools/gradle/build.gradle b/devtools/gradle/build.gradle index 277ac086a64de..e55bfd1ad8d18 100644 --- a/devtools/gradle/build.gradle +++ b/devtools/gradle/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'com.gradle.plugin-publish' version '0.12.0' + id 'com.gradle.plugin-publish' version '0.13.0' id 'java-gradle-plugin' } if 
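CliTest's readString helper above wraps Files.readAllBytes; on Java 11 and later the same behaviour is available directly from the JDK (a drop-in alternative, assuming UTF-8 content, which Files.readString uses by default):

// Java 11+ equivalent of the helper above; Files and Path are already imported in CliTest
private String readString(Path path) throws Exception {
    return Files.readString(path);
}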
(JavaVersion.current().isJava9Compatible()) { @@ -16,33 +16,38 @@ compileTestJava { } repositories { - mavenLocal() + // in case a custom local repo is configured we are going to use that instead of the default mavenLocal() + if (System.properties.containsKey('maven.repo.local')) { + maven { + url System.properties.get('maven.repo.local') + } + } else { + mavenLocal() + } mavenCentral() } -configurations.all { - exclude group: 'io.quarkus', module: 'quarkus-bootstrap-maven-resolver' -} - dependencies { api gradleApi() implementation "io.quarkus:quarkus-bootstrap-core:${version}" implementation "io.quarkus:quarkus-devtools-common:${version}" implementation "io.quarkus:quarkus-platform-descriptor-json:${version}" - implementation "io.quarkus:quarkus-platform-descriptor-resolver-json:${version}" implementation "io.quarkus:quarkus-core-deployment:${version}" - testImplementation 'org.mockito:mockito-core:3.4.4' - testImplementation 'org.assertj:assertj-core:3.16.1' - testImplementation 'org.junit.jupiter:junit-jupiter:5.6.2' + testImplementation 'org.mockito:mockito-core:3.8.0' + testImplementation 'org.assertj:assertj-core:3.19.0' + testImplementation 'org.junit.jupiter:junit-jupiter:5.7.1' testImplementation 'org.awaitility:awaitility:4.0.3' - testImplementation 'org.jprocesses:jProcesses:1.6.5' testImplementation "io.quarkus:quarkus-devmode-test-utils:${version}" testImplementation gradleTestKit() } test { + // propagate the custom local maven repo, in case it's configured + if (System.properties.containsKey('maven.repo.local')) { + systemProperty 'maven.repo.local', System.properties.get('maven.repo.local') + } testLogging { events "passed", "skipped", "failed" } @@ -54,9 +59,9 @@ javadoc { } pluginBundle { - website = 'https://quarkus.io/' - vcsUrl = 'https://github.com/quarkusio/quarkus' - tags = ['quarkus', 'quarkusio', 'graalvm'] + website = 'https://quarkus.io/' + vcsUrl = 'https://github.com/quarkusio/quarkus' + tags = ['quarkus', 'quarkusio', 'graalvm'] } gradlePlugin { diff --git a/devtools/gradle/gradle/wrapper/gradle-wrapper.jar b/devtools/gradle/gradle/wrapper/gradle-wrapper.jar index 62d4c053550b9..e708b1c023ec8 100644 Binary files a/devtools/gradle/gradle/wrapper/gradle-wrapper.jar and b/devtools/gradle/gradle/wrapper/gradle-wrapper.jar differ diff --git a/devtools/gradle/gradle/wrapper/gradle-wrapper.properties b/devtools/gradle/gradle/wrapper/gradle-wrapper.properties index ac33e9944a65e..8cf6eb5ad222e 100644 --- a/devtools/gradle/gradle/wrapper/gradle-wrapper.properties +++ b/devtools/gradle/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.8.3-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/devtools/gradle/gradlew b/devtools/gradle/gradlew index fbd7c515832da..4f906e0c811fc 100755 --- a/devtools/gradle/gradlew +++ b/devtools/gradle/gradlew @@ -130,7 +130,7 @@ fi if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - + JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath diff --git a/devtools/gradle/gradlew.bat b/devtools/gradle/gradlew.bat index a9f778a7a964b..ac1b06f93825d 100644 --- a/devtools/gradle/gradlew.bat +++ b/devtools/gradle/gradlew.bat @@ -40,7 +40,7 @@ if defined JAVA_HOME 
goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -54,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -64,21 +64,6 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line @@ -86,7 +71,7 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell diff --git a/devtools/gradle/pom.xml b/devtools/gradle/pom.xml index 42c2118864d55..cfc5f3bf44de1 100644 --- a/devtools/gradle/pom.xml +++ b/devtools/gradle/pom.xml @@ -45,10 +45,6 @@ io.quarkus quarkus-platform-descriptor-json - - io.quarkus - quarkus-platform-descriptor-resolver-json - org.awaitility awaitility @@ -80,11 +76,13 @@ clean ${gradle.task} -Pdescription=${project.description} + -Dmaven.repo.local=${settings.localRepository} -S --stacktrace + --no-daemon - ${settings.localRepository} + ${settings.localRepository} ${env.MAVEN_OPTS} ${skip.gradle.build} @@ -196,5 +194,17 @@ assemble + + + quick-build-ci + + + quickly-ci + + + + assemble + + diff --git a/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleGroovyProjectBuildFile.java b/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleGroovyProjectBuildFile.java new file mode 100644 index 0000000000000..66a9f7449076d --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleGroovyProjectBuildFile.java @@ -0,0 +1,42 @@ +package io.quarkus.devtools.project.buildfile; + +import org.gradle.api.Project; + +import io.quarkus.bootstrap.model.AppArtifactCoords; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.registry.catalog.ExtensionCatalog; + +public class GradleGroovyProjectBuildFile extends GradleProjectBuildFile { + + static final String BUILD_GRADLE_PATH = "build.gradle"; + static final String SETTINGS_GRADLE_PATH = "settings.gradle"; + + public GradleGroovyProjectBuildFile(Project project, ExtensionCatalog catalog) { + super(project, catalog); + } + + @Override + String getSettingsGradlePath() { + return SETTINGS_GRADLE_PATH; + } + + @Override + String getBuildGradlePath() { + return BUILD_GRADLE_PATH; + } + + @Override + protected boolean addDependency(AppArtifactCoords coords, boolean managed) { + return addDependencyInModel(getModel(), coords, managed); + } + + @Override + public BuildTool getBuildTool() { + return BuildTool.GRADLE; + } + + static boolean addDependencyInModel(Model model, AppArtifactCoords coords, boolean managed) { + return addDependencyInModel(model, + 
String.format(" implementation %s%n", createDependencyCoordinatesString(coords, managed, '\''))); + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleKotlinProjectBuildFile.java b/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleKotlinProjectBuildFile.java new file mode 100644 index 0000000000000..4ca9db743ea10 --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleKotlinProjectBuildFile.java @@ -0,0 +1,42 @@ +package io.quarkus.devtools.project.buildfile; + +import org.gradle.api.Project; + +import io.quarkus.bootstrap.model.AppArtifactCoords; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.registry.catalog.ExtensionCatalog; + +public class GradleKotlinProjectBuildFile extends GradleProjectBuildFile { + + static final String BUILD_GRADLE_PATH = "build.gradle.kts"; + static final String SETTINGS_GRADLE_PATH = "settings.gradle.kts"; + + public GradleKotlinProjectBuildFile(Project project, ExtensionCatalog catalog) { + super(project, catalog); + } + + @Override + String getSettingsGradlePath() { + return SETTINGS_GRADLE_PATH; + } + + @Override + String getBuildGradlePath() { + return BUILD_GRADLE_PATH; + } + + @Override + protected boolean addDependency(AppArtifactCoords coords, boolean managed) { + return addDependencyInModel(getModel(), coords, managed); + } + + @Override + public BuildTool getBuildTool() { + return BuildTool.GRADLE_KOTLIN_DSL; + } + + static boolean addDependencyInModel(Model model, AppArtifactCoords coords, boolean managed) { + return addDependencyInModel(model, + String.format(" implementation(%s)%n", createDependencyCoordinatesString(coords, managed, '"'))); + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleProjectBuildFile.java b/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleProjectBuildFile.java new file mode 100644 index 0000000000000..2ae31c87f2d61 --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/devtools/project/buildfile/GradleProjectBuildFile.java @@ -0,0 +1,63 @@ +package io.quarkus.devtools.project.buildfile; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.Dependency; +import org.gradle.api.artifacts.ModuleDependency; +import org.gradle.api.artifacts.ResolvedArtifact; +import org.gradle.api.attributes.Category; +import org.gradle.api.plugins.JavaPlugin; + +import io.quarkus.bootstrap.model.AppArtifactCoords; +import io.quarkus.registry.catalog.ExtensionCatalog; + +public abstract class GradleProjectBuildFile extends AbstractGradleBuildFile { + + private final Project project; + + public GradleProjectBuildFile(Project project, ExtensionCatalog catalog) { + super(project.getProjectDir().toPath(), catalog, + project.getParent() != null ? 
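The only behavioural difference between the Groovy and Kotlin build-file handlers above is the dependency line they emit. Assuming, for illustration, that createDependencyCoordinatesString (presumably inherited from the shared Gradle build-file base class) renders group:artifact:version coordinates wrapped in the supplied quote character, the two formats produce lines of this shape:

// illustrative only: shows the shape of the emitted lines, not the real helper
String groovyLine = String.format("    implementation %s%n", "'group:artifact:version'");
String kotlinLine = String.format("    implementation(%s)%n", "\"group:artifact:version\"");
// groovyLine ->     implementation 'group:artifact:version'
// kotlinLine ->     implementation("group:artifact:version")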
project.getRootProject().getProjectDir().toPath() + : project.getProjectDir().toPath()); + this.project = project; + } + + @Override + protected List getDependencies() throws IOException { + + final Set boms = new HashSet<>(); + // collect enforced platforms + final Configuration impl = project.getConfigurations() + .getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); + for (Dependency d : impl.getAllDependencies()) { + if (!(d instanceof ModuleDependency)) { + continue; + } + final ModuleDependency module = (ModuleDependency) d; + final Category category = module.getAttributes().getAttribute(Category.CATEGORY_ATTRIBUTE); + if (category != null && (Category.ENFORCED_PLATFORM.equals(category.getName()) + || Category.REGULAR_PLATFORM.equals(category.getName()))) { + boms.add(d); + } + } + + final Set resolvedArtifacts = project.getConfigurations() + .getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME).getResolvedConfiguration() + .getResolvedArtifacts(); + final List coords = new ArrayList<>(boms.size() + resolvedArtifacts.size()); + boms.forEach(d -> { + coords.add(new AppArtifactCoords(d.getGroup(), d.getName(), null, "pom", d.getVersion())); + }); + resolvedArtifacts.forEach(a -> { + coords.add(new AppArtifactCoords(a.getModuleVersion().getId().getGroup(), a.getName(), + a.getClassifier(), a.getExtension(), a.getModuleVersion().getId().getVersion())); + }); + return coords; + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/AppModelGradleResolver.java b/devtools/gradle/src/main/java/io/quarkus/gradle/AppModelGradleResolver.java index 501932d1fd348..4760563d618ae 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/AppModelGradleResolver.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/AppModelGradleResolver.java @@ -7,6 +7,7 @@ import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ResolveException; import org.gradle.api.artifacts.ResolvedArtifact; import org.gradle.api.artifacts.ResolvedConfiguration; import org.gradle.api.internal.artifacts.dependencies.DefaultDependencyArtifact; @@ -35,12 +36,23 @@ public AppModelGradleResolver(Project project, QuarkusModel model) { @Override public String getLatestVersion(AppArtifact appArtifact, String upToVersion, boolean inclusive) throws AppModelResolverException { - throw new UnsupportedOperationException(); + try { + return resolveArtifact(new AppArtifact(appArtifact.getGroupId(), appArtifact.getArtifactId(), + appArtifact.getClassifier(), appArtifact.getType(), + "[" + appArtifact.getVersion() + "," + upToVersion + (inclusive ? 
"]" : ")"))).getVersion(); + } catch (AppModelResolverException e) { + return null; + } } @Override public String getLatestVersionFromRange(AppArtifact appArtifact, String range) throws AppModelResolverException { - throw new UnsupportedOperationException(); + try { + return resolveArtifact(new AppArtifact(appArtifact.getGroupId(), appArtifact.getArtifactId(), + appArtifact.getClassifier(), appArtifact.getType(), range)).getVersion(); + } catch (AppModelResolverException e) { + return null; + } } @Override @@ -63,39 +75,52 @@ public void relink(AppArtifact appArtifact, Path localPath) throws AppModelResol @Override public Path resolve(AppArtifact appArtifact) throws AppModelResolverException { - if (!appArtifact.isResolved()) { - final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); - dep.setExtension(appArtifact.getType()); - dep.setType(appArtifact.getType()); - dep.setName(appArtifact.getArtifactId()); - if (appArtifact.getClassifier() != null) { - dep.setClassifier(appArtifact.getClassifier()); - } + return resolveArtifact(appArtifact).getPaths().getSinglePath(); + } - final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(appArtifact.getGroupId(), - appArtifact.getArtifactId(), appArtifact.getVersion(), null); - gradleDep.addArtifact(dep); - - final Configuration detachedConfig = project.getConfigurations().detachedConfiguration(gradleDep); - - final ResolvedConfiguration rc = detachedConfig.getResolvedConfiguration(); - Set resolvedArtifacts = rc.getResolvedArtifacts(); - for (ResolvedArtifact a : resolvedArtifacts) { - if (appArtifact.getArtifactId().equals(a.getName()) - && appArtifact.getType().equals(a.getType()) - && (a.getClassifier() == null ? appArtifact.getClassifier() == null - : a.getClassifier().equals(appArtifact.getClassifier())) - && appArtifact.getGroupId().equals(a.getModuleVersion().getId().getGroup())) { - appArtifact.setPath(a.getFile().toPath()); - } - } + private AppArtifact resolveArtifact(AppArtifact appArtifact) throws AppModelResolverException { + if (appArtifact.isResolved()) { + return appArtifact; + } + final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); + dep.setExtension(appArtifact.getType()); + dep.setType(appArtifact.getType()); + dep.setName(appArtifact.getArtifactId()); + if (appArtifact.getClassifier() != null) { + dep.setClassifier(appArtifact.getClassifier()); + } + + final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(appArtifact.getGroupId(), + appArtifact.getArtifactId(), appArtifact.getVersion(), null); + gradleDep.addArtifact(dep); - if (!appArtifact.isResolved()) { - throw new AppModelResolverException("Failed to resolve " + appArtifact); + final Configuration detachedConfig = project.getConfigurations().detachedConfiguration(gradleDep); + + final ResolvedConfiguration rc = detachedConfig.getResolvedConfiguration(); + Set resolvedArtifacts; + try { + resolvedArtifacts = rc.getResolvedArtifacts(); + } catch (ResolveException e) { + throw new AppModelResolverException("Failed to resolve " + appArtifact, e); + } + for (ResolvedArtifact a : resolvedArtifacts) { + if (appArtifact.getArtifactId().equals(a.getName()) && appArtifact.getType().equals(a.getType()) + && (a.getClassifier() == null ? 
appArtifact.getClassifier() == null + : a.getClassifier().equals(appArtifact.getClassifier())) + && appArtifact.getGroupId().equals(a.getModuleVersion().getId().getGroup())) { + if (!appArtifact.getVersion().equals(a.getModuleVersion().getId().getVersion())) { + appArtifact = new AppArtifact(appArtifact.getGroupId(), appArtifact.getArtifactId(), + appArtifact.getClassifier(), appArtifact.getType(), a.getModuleVersion().getId().getVersion()); + } + appArtifact.setPath(a.getFile().toPath()); + break; } + } + if (!appArtifact.isResolved()) { + throw new AppModelResolverException("Failed to resolve " + appArtifact); } - return appArtifact.getPath(); + return appArtifact; } @Override @@ -128,5 +153,4 @@ public AppModel resolveManagedModel(AppArtifact appArtifact, List throws AppModelResolverException { return resolveModel(appArtifact); } - } diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/GradleBuildFileFromConnector.java b/devtools/gradle/src/main/java/io/quarkus/gradle/GradleBuildFileFromConnector.java deleted file mode 100644 index 4b3e864bdbbe6..0000000000000 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/GradleBuildFileFromConnector.java +++ /dev/null @@ -1,73 +0,0 @@ -package io.quarkus.gradle; - -import java.io.IOException; -import java.nio.file.Path; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -import org.apache.maven.model.Dependency; -import org.gradle.tooling.BuildException; -import org.gradle.tooling.GradleConnector; -import org.gradle.tooling.ProjectConnection; -import org.gradle.tooling.model.eclipse.EclipseExternalDependency; -import org.gradle.tooling.model.eclipse.EclipseProject; - -import io.quarkus.devtools.project.buildfile.AbstractGradleBuildFile; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; - -public class GradleBuildFileFromConnector extends AbstractGradleBuildFile { - - private List dependencies = null; - - public GradleBuildFileFromConnector(final Path projectDirPath, final QuarkusPlatformDescriptor platformDescriptor) { - super(projectDirPath, platformDescriptor); - } - - public GradleBuildFileFromConnector(Path projectDirPath, QuarkusPlatformDescriptor platformDescriptor, - Path rootProjectPath) { - super(projectDirPath, platformDescriptor, rootProjectPath); - } - - @Override - public List getDependencies() throws IOException { - if (dependencies == null) { - EclipseProject eclipseProject = null; - if (getBuildContent() != null) { - try { - ProjectConnection connection = GradleConnector.newConnector() - .forProjectDirectory(getProjectDirPath().toFile()) - .connect(); - eclipseProject = connection.getModel(EclipseProject.class); - } catch (BuildException e) { - // ignore this error. 
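AppModelGradleResolver.resolveArtifact above resolves a single artifact by wrapping it in a DefaultExternalModuleDependency and feeding it to a detached configuration. Stripped of the classifier matching, version fix-up and error handling, the core pattern looks roughly like this (a sketch against the same Gradle APIs used above, with a made-up helper name):

import java.nio.file.Path;
import org.gradle.api.Project;
import org.gradle.api.artifacts.ResolvedArtifact;
import org.gradle.api.internal.artifacts.dependencies.DefaultExternalModuleDependency;

class DetachedResolutionSketch {
    // resolve a single group:name:version jar through a throw-away (detached) configuration
    static Path resolveJar(Project project, String group, String name, String version) {
        DefaultExternalModuleDependency dep = new DefaultExternalModuleDependency(group, name, version, null);
        for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(dep)
                .getResolvedConfiguration().getResolvedArtifacts()) {
            if (name.equals(a.getName())) {
                return a.getFile().toPath();
            }
        }
        throw new IllegalStateException("Failed to resolve " + group + ":" + name + ":" + version);
    }
}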
- e.printStackTrace(); - } - } - if (eclipseProject != null) { - dependencies = eclipseProject.getClasspath().stream().map(this::gradleModuleVersionToDependency) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } else { - dependencies = Collections.emptyList(); - } - dependencies = Collections.unmodifiableList(dependencies); - } - return dependencies; - } - - private Dependency gradleModuleVersionToDependency(EclipseExternalDependency eed) { - Dependency dependency = new Dependency(); - if (eed == null || eed.getGradleModuleVersion() == null) { - // local dependencies are ignored - System.err.println("Found null dependency:" + eed); - return null; - } - dependency.setGroupId(eed.getGradleModuleVersion().getGroup()); - dependency.setArtifactId(eed.getGradleModuleVersion().getName()); - dependency.setVersion(eed.getGradleModuleVersion().getVersion()); - return dependency; - } - -} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPlugin.java b/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPlugin.java index c4568f8c394df..1db077756ba4b 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPlugin.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPlugin.java @@ -16,7 +16,6 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; -import org.gradle.api.UnknownTaskException; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.ConfigurationContainer; import org.gradle.api.artifacts.ProjectDependency; @@ -32,12 +31,15 @@ import org.gradle.util.GradleVersion; import io.quarkus.gradle.builder.QuarkusModelBuilder; +import io.quarkus.gradle.extension.QuarkusPluginExtension; +import io.quarkus.gradle.extension.SourceSetExtension; import io.quarkus.gradle.tasks.QuarkusAddExtension; import io.quarkus.gradle.tasks.QuarkusBuild; import io.quarkus.gradle.tasks.QuarkusDev; +import io.quarkus.gradle.tasks.QuarkusGenerateCode; import io.quarkus.gradle.tasks.QuarkusGenerateConfig; import io.quarkus.gradle.tasks.QuarkusListExtensions; -import io.quarkus.gradle.tasks.QuarkusPrepare; +import io.quarkus.gradle.tasks.QuarkusListPlatforms; import io.quarkus.gradle.tasks.QuarkusRemoteDev; import io.quarkus.gradle.tasks.QuarkusRemoveExtension; import io.quarkus.gradle.tasks.QuarkusTestConfig; @@ -50,14 +52,16 @@ public class QuarkusPlugin implements Plugin { public static final String EXTENSION_NAME = "quarkus"; public static final String LIST_EXTENSIONS_TASK_NAME = "listExtensions"; + public static final String LIST_PLATFORMS_TASK_NAME = "listPlatforms"; public static final String ADD_EXTENSION_TASK_NAME = "addExtension"; public static final String REMOVE_EXTENSION_TASK_NAME = "removeExtension"; - public static final String QUARKUS_PREPARE_TASK_NAME = "quarkusPrepare"; - public static final String QUARKUS_PREPARE_TESTS_TASK_NAME = "quarkusPrepareTests"; + public static final String QUARKUS_GENERATE_CODE_TASK_NAME = "quarkusGenerateCode"; + public static final String QUARKUS_GENERATE_CODE_TESTS_TASK_NAME = "quarkusGenerateCodeTests"; public static final String QUARKUS_BUILD_TASK_NAME = "quarkusBuild"; public static final String GENERATE_CONFIG_TASK_NAME = "generateConfig"; public static final String QUARKUS_DEV_TASK_NAME = "quarkusDev"; public static final String QUARKUS_REMOTE_DEV_TASK_NAME = "quarkusRemoteDev"; + public static final String DEV_MODE_CONFIGURATION_NAME = "quarkusDev"; @Deprecated public static final String BUILD_NATIVE_TASK_NAME = "buildNative"; @@ -88,20 +92,21 @@ public 
void apply(Project project) { registerTasks(project, quarkusExt); } - @SuppressWarnings("Convert2Lambda") private void registerTasks(Project project, QuarkusPluginExtension quarkusExt) { TaskContainer tasks = project.getTasks(); tasks.create(LIST_EXTENSIONS_TASK_NAME, QuarkusListExtensions.class); + tasks.create(LIST_PLATFORMS_TASK_NAME, QuarkusListPlatforms.class); tasks.create(ADD_EXTENSION_TASK_NAME, QuarkusAddExtension.class); tasks.create(REMOVE_EXTENSION_TASK_NAME, QuarkusRemoveExtension.class); tasks.create(GENERATE_CONFIG_TASK_NAME, QuarkusGenerateConfig.class); - QuarkusPrepare quarkusPrepare = tasks.create(QUARKUS_PREPARE_TASK_NAME, QuarkusPrepare.class); - QuarkusPrepare quarkusPrepareTests = tasks.create(QUARKUS_PREPARE_TESTS_TASK_NAME, QuarkusPrepare.class); - quarkusPrepareTests.setTest(true); + QuarkusGenerateCode quarkusGenerateCode = tasks.create(QUARKUS_GENERATE_CODE_TASK_NAME, QuarkusGenerateCode.class); + QuarkusGenerateCode quarkusGenerateCodeTests = tasks.create(QUARKUS_GENERATE_CODE_TESTS_TASK_NAME, + QuarkusGenerateCode.class); + quarkusGenerateCodeTests.setTest(true); Task quarkusBuild = tasks.create(QUARKUS_BUILD_TASK_NAME, QuarkusBuild.class); - quarkusBuild.dependsOn(quarkusPrepare); + quarkusBuild.dependsOn(quarkusGenerateCode); Task quarkusDev = tasks.create(QUARKUS_DEV_TASK_NAME, QuarkusDev.class); Task quarkusRemoteDev = tasks.create(QUARKUS_REMOTE_DEV_TASK_NAME, QuarkusRemoteDev.class); tasks.create(QUARKUS_TEST_CONFIG_TASK_NAME, QuarkusTestConfig.class); @@ -135,22 +140,24 @@ public void execute(Task test) { JavaPlugin.class, javaPlugin -> { project.afterEvaluate(this::afterEvaluate); + ConfigurationContainer configurations = project.getConfigurations(); JavaCompile compileJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME); - compileJavaTask.mustRunAfter(quarkusPrepare); - quarkusPrepare.setSourceRegistrar(compileJavaTask::source); - - try { - // TODO: support kotlin - tasks.getByName("compileTestKotlin"); - } catch (UnknownTaskException noKotlin) { - JavaCompile compileTestJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_TEST_JAVA_TASK_NAME); - compileTestJavaTask.dependsOn(quarkusPrepareTests); - quarkusPrepareTests.setSourceRegistrar(compileTestJavaTask::source); - } + + // By default, gradle looks for annotation processors in the annotationProcessor configuration. + // This configure the compile task to look for annotation processors in the compileClasspath. 
+ compileJavaTask.getOptions().setAnnotationProcessorPath( + configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)); + + compileJavaTask.dependsOn(quarkusGenerateCode); + quarkusGenerateCode.setSourceRegistrar(compileJavaTask::source); + + JavaCompile compileTestJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_TEST_JAVA_TASK_NAME); + compileTestJavaTask.dependsOn(quarkusGenerateCodeTests); + quarkusGenerateCodeTests.setSourceRegistrar(compileTestJavaTask::source); Task classesTask = tasks.getByName(JavaPlugin.CLASSES_TASK_NAME); Task resourcesTask = tasks.getByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME); - quarkusDev.dependsOn(classesTask, resourcesTask, quarkusPrepare); + quarkusDev.dependsOn(classesTask, resourcesTask, quarkusGenerateCode); quarkusRemoteDev.dependsOn(classesTask, resourcesTask); quarkusBuild.dependsOn(classesTask, resourcesTask, tasks.getByName(JavaPlugin.JAR_TASK_NAME)); @@ -160,8 +167,8 @@ public void execute(Task test) { SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME); - quarkusPrepare.setSourcesDirectories(getSourcesParents(mainSourceSet)); - quarkusPrepareTests.setSourcesDirectories(getSourcesParents(testSourceSet)); + quarkusGenerateCode.setSourcesDirectories(getSourcesParents(mainSourceSet)); + quarkusGenerateCodeTests.setSourcesDirectories(getSourcesParents(testSourceSet)); nativeTestSourceSet.setCompileClasspath( nativeTestSourceSet.getCompileClasspath() @@ -173,8 +180,12 @@ public void execute(Task test) { .plus(mainSourceSet.getOutput()) .plus(testSourceSet.getOutput())); + // create a custom configuration for devmode + configurations.create(DEV_MODE_CONFIGURATION_NAME).extendsFrom( + configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME), + configurations.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)); + // create a custom configuration to be used for the dependencies of the testNative task - ConfigurationContainer configurations = project.getConfigurations(); configurations.maybeCreate(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME) .extendsFrom(configurations.findByName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME)); configurations.maybeCreate(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME) @@ -186,7 +197,29 @@ public void execute(Task test) { tasks.withType(Test.class).forEach(configureTestTask); tasks.withType(Test.class).whenTaskAdded(configureTestTask::accept); + + sourceSets.create(QuarkusGenerateCode.QUARKUS_GENERATED_SOURCES).getOutput() + .dir(QuarkusGenerateCode.QUARKUS_GENERATED_SOURCES); + sourceSets.create(QuarkusGenerateCode.QUARKUS_TEST_GENERATED_SOURCES).getOutput() + .dir(QuarkusGenerateCode.QUARKUS_TEST_GENERATED_SOURCES); }); + + project.getPlugins().withId("org.jetbrains.kotlin.jvm", plugin -> { + tasks.getByName("compileKotlin").dependsOn(quarkusGenerateCode); + tasks.getByName("compileTestKotlin").dependsOn(quarkusGenerateCodeTests); + + // Register the quarkus-generated-code + SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class) + .getSourceSets(); + SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); + SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME); + SourceSet generatedSourceSet = sourceSets.getByName(QuarkusGenerateCode.QUARKUS_GENERATED_SOURCES); + SourceSet generatedTestSourceSet = sourceSets.getByName(QuarkusGenerateCode.QUARKUS_TEST_GENERATED_SOURCES); + for 
(String provider : QuarkusGenerateCode.CODE_GENERATION_PROVIDER) { + mainSourceSet.getJava().srcDir(new File(generatedSourceSet.getJava().getOutputDir(), provider)); + testSourceSet.getJava().srcDir(new File(generatedTestSourceSet.getJava().getOutputDir(), provider)); + } + }); } private Set getSourcesParents(SourceSet mainSourceSet) { @@ -220,13 +253,37 @@ private void configureBuildNativeTask(Project project) { private void afterEvaluate(Project project) { final HashSet visited = new HashSet<>(); - project.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME) + ConfigurationContainer configurations = project.getConfigurations(); + configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME) .getIncoming().getDependencies() .forEach(d -> { if (d instanceof ProjectDependency) { visitProjectDep(project, ((ProjectDependency) d).getDependencyProject(), visited); } }); + + SourceSetExtension sourceSetExtension = project.getExtensions().getByType(QuarkusPluginExtension.class) + .sourceSetExtension(); + + if (sourceSetExtension.extraNativeTest() != null) { + + SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class) + .getSourceSets(); + SourceSet nativeTestSourceSets = sourceSets.getByName(NATIVE_TEST_SOURCE_SET_NAME); + nativeTestSourceSets.setCompileClasspath( + nativeTestSourceSets.getCompileClasspath().plus(sourceSetExtension.extraNativeTest().getOutput())); + nativeTestSourceSets.setRuntimeClasspath( + nativeTestSourceSets.getRuntimeClasspath().plus(sourceSetExtension.extraNativeTest().getOutput())); + + configurations.findByName(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME).extendsFrom( + configurations.findByName(sourceSetExtension.extraNativeTest().getImplementationConfigurationName())); + configurations.findByName(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME).extendsFrom( + configurations.findByName(sourceSetExtension.extraNativeTest().getRuntimeOnlyConfigurationName())); + + QuarkusTestNative nativeTest = (QuarkusTestNative) project.getTasks().getByName(TEST_NATIVE_TASK_NAME); + nativeTest.setTestClassesDirs(nativeTestSourceSets.getOutput().getClassesDirs()); + nativeTest.setClasspath(nativeTestSourceSets.getRuntimeClasspath()); + } } private void visitProjectDep(Project project, Project dep, Set visited) { @@ -249,8 +306,8 @@ private void setupQuarkusBuildTaskDeps(Project project, Project dep, Set if (quarkusBuild != null) { final Task jarTask = dep.getTasks().findByName(JavaPlugin.JAR_TASK_NAME); if (jarTask != null) { - final Task quarkusPrepare = project.getTasks().findByName(QUARKUS_PREPARE_TASK_NAME); - final Task quarkusPrepareTests = project.getTasks().findByName(QUARKUS_PREPARE_TESTS_TASK_NAME); + final Task quarkusPrepare = project.getTasks().findByName(QUARKUS_GENERATE_CODE_TASK_NAME); + final Task quarkusPrepareTests = project.getTasks().findByName(QUARKUS_GENERATE_CODE_TESTS_TASK_NAME); quarkusBuild.dependsOn(jarTask); if (quarkusPrepare != null) { quarkusPrepare.dependsOn(jarTask); @@ -267,6 +324,11 @@ private void setupQuarkusBuildTaskDeps(Project project, Project dep, Set if (resourcesTask != null) { quarkusDev.dependsOn(resourcesTask); } + final Task resourcesTaskJandex = dep.getTasks().findByName("jandex"); + if (resourcesTaskJandex != null) { + quarkusDev.dependsOn(resourcesTaskJandex); + } + } final Configuration compileConfig = dep.getConfigurations().findByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME); diff --git 
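The QuarkusPlugin changes above follow the common Gradle pattern of creating tasks with tasks.create and wiring their ordering with dependsOn inside plugins.withType(JavaPlugin.class, ...). A stripped-down sketch of that wiring (hypothetical plugin and task names; the Gradle API calls are the same ones used above):

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.plugins.JavaPlugin;

// minimal create-then-wire plugin; names are made up for illustration
public class DemoWiringPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        Task generate = project.getTasks().create("demoGenerateCode");
        Task build = project.getTasks().create("demoBuild");
        build.dependsOn(generate);

        // only hook into compilation when the java plugin is applied
        project.getPlugins().withType(JavaPlugin.class, javaPlugin -> {
            Task compileJava = project.getTasks().getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME);
            compileJava.dependsOn(generate);
        });
    }
}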
a/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPluginExtension.java b/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPluginExtension.java deleted file mode 100644 index 900a2bffe3395..0000000000000 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/QuarkusPluginExtension.java +++ /dev/null @@ -1,202 +0,0 @@ -package io.quarkus.gradle; - -import java.io.File; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Map; -import java.util.Set; - -import org.gradle.api.Project; -import org.gradle.api.file.FileCollection; -import org.gradle.api.file.RegularFile; -import org.gradle.api.plugins.Convention; -import org.gradle.api.plugins.JavaPlugin; -import org.gradle.api.plugins.JavaPluginConvention; -import org.gradle.api.provider.Provider; -import org.gradle.api.tasks.SourceSet; -import org.gradle.api.tasks.testing.Test; -import org.gradle.jvm.tasks.Jar; - -import io.quarkus.bootstrap.BootstrapConstants; -import io.quarkus.bootstrap.model.AppArtifact; -import io.quarkus.bootstrap.model.AppModel; -import io.quarkus.bootstrap.resolver.AppModelResolver; -import io.quarkus.bootstrap.resolver.model.ModelParameter; -import io.quarkus.bootstrap.resolver.model.QuarkusModel; -import io.quarkus.bootstrap.resolver.model.impl.ModelParameterImpl; -import io.quarkus.gradle.builder.QuarkusModelBuilder; -import io.quarkus.gradle.tasks.QuarkusGradleUtils; -import io.quarkus.runtime.LaunchMode; - -public class QuarkusPluginExtension { - - private final Project project; - - private File outputDirectory; - - private String finalName; - - private File sourceDir; - - private File workingDir; - - private File outputConfigDirectory; - - public QuarkusPluginExtension(Project project) { - this.project = project; - } - - void beforeTest(Test task) { - try { - final Map props = task.getSystemProperties(); - - final AppModel appModel = getAppModelResolver(LaunchMode.TEST) - .resolveModel(getAppArtifact()); - final Path serializedModel = QuarkusGradleUtils.serializeAppModel(appModel, task); - props.put(BootstrapConstants.SERIALIZED_APP_MODEL, serializedModel.toString()); - - final String nativeRunner = task.getProject().getBuildDir().toPath().resolve(finalName() + "-runner") - .toAbsolutePath() - .toString(); - props.put("native.image.path", nativeRunner); - } catch (Exception e) { - throw new IllegalStateException("Failed to resolve deployment classpath", e); - } - } - - public Path appJarOrClasses() { - final Jar jarTask = (Jar) project.getTasks().findByName(JavaPlugin.JAR_TASK_NAME); - if (jarTask == null) { - throw new RuntimeException("Failed to locate task 'jar' in the project."); - } - final Provider jarProvider = jarTask.getArchiveFile(); - Path classesDir = null; - if (jarProvider.isPresent()) { - final File f = jarProvider.get().getAsFile(); - if (f.exists()) { - classesDir = f.toPath(); - } - } - if (classesDir == null) { - final Convention convention = project.getConvention(); - JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class); - if (javaConvention != null) { - final SourceSet mainSourceSet = javaConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME); - final String classesPath = QuarkusGradleUtils.getClassesDir(mainSourceSet, jarTask.getTemporaryDir()); - if (classesPath != null) { - classesDir = Paths.get(classesPath); - } - } - } - if (classesDir == null) { - throw new RuntimeException("Failed to locate project's classes directory"); - } - return classesDir; - } - - public File outputDirectory() { - if 
(outputDirectory == null) { - outputDirectory = getLastFile(project.getConvention().getPlugin(JavaPluginConvention.class) - .getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getOutput().getClassesDirs()); - } - return outputDirectory; - } - - public void setOutputDirectory(String outputDirectory) { - this.outputDirectory = new File(outputDirectory); - } - - public File outputConfigDirectory() { - if (outputConfigDirectory == null) { - outputConfigDirectory = project.getConvention().getPlugin(JavaPluginConvention.class) - .getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getOutput().getResourcesDir(); - } - return outputConfigDirectory; - } - - public void setOutputConfigDirectory(String outputConfigDirectory) { - this.outputConfigDirectory = new File(outputConfigDirectory); - } - - public File sourceDir() { - if (sourceDir == null) { - sourceDir = getLastFile(project.getConvention().getPlugin(JavaPluginConvention.class) - .getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getAllJava().getSourceDirectories()); - } - return sourceDir; - } - - public void setSourceDir(String sourceDir) { - this.sourceDir = new File(sourceDir); - } - - public File workingDir() { - if (workingDir == null) { - workingDir = outputDirectory(); - } - return workingDir; - } - - public void setWorkingDir(String workingDir) { - this.workingDir = new File(workingDir); - } - - public String finalName() { - if (finalName == null || finalName.length() == 0) { - this.finalName = String.format("%s-%s", project.getName(), project.getVersion()); - } - return finalName; - } - - public void setFinalName(String finalName) { - this.finalName = finalName; - } - - public Set resourcesDir() { - return project.getConvention().getPlugin(JavaPluginConvention.class) - .getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getResources().getSrcDirs(); - } - - public AppArtifact getAppArtifact() { - return new AppArtifact(project.getGroup().toString(), project.getName(), - project.getVersion().toString()); - } - - public AppModelResolver getAppModelResolver() { - return getAppModelResolver(LaunchMode.NORMAL); - } - - public AppModelResolver getAppModelResolver(LaunchMode mode) { - return new AppModelGradleResolver(project, getQuarkusModel(mode)); - } - - public QuarkusModel getQuarkusModel() { - return getQuarkusModel(LaunchMode.NORMAL); - } - - public QuarkusModel getQuarkusModel(LaunchMode mode) { - return create(project, mode); - } - - private QuarkusModel create(Project project, LaunchMode mode) { - QuarkusModelBuilder builder = new QuarkusModelBuilder(); - ModelParameter params = new ModelParameterImpl(); - params.setMode(mode.toString()); - return (QuarkusModel) builder.buildAll(QuarkusModel.class.getName(), params, project); - } - - /** - * Returns the last file from the specified {@link FileCollection}. - * Needed for the Scala plugin. 
- */ - private File getLastFile(FileCollection fileCollection) { - File result = null; - for (File f : fileCollection) { - if (result == null || f.exists()) { - result = f; - } - } - return result; - } - -} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/builder/QuarkusModelBuilder.java b/devtools/gradle/src/main/java/io/quarkus/gradle/builder/QuarkusModelBuilder.java index 60f202b688aec..8a1115b1a96f5 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/builder/QuarkusModelBuilder.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/builder/QuarkusModelBuilder.java @@ -1,29 +1,43 @@ package io.quarkus.gradle.builder; +import static io.quarkus.bootstrap.resolver.model.impl.ArtifactCoordsImpl.TYPE_JAR; + import java.io.File; +import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Properties; import java.util.Set; +import java.util.stream.Collectors; import org.gradle.api.GradleException; import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.UnknownDomainObjectException; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ExternalModuleDependency; import org.gradle.api.artifacts.ModuleDependency; +import org.gradle.api.artifacts.ModuleVersionIdentifier; import org.gradle.api.artifacts.ResolvedArtifact; import org.gradle.api.artifacts.ResolvedConfiguration; import org.gradle.api.artifacts.ResolvedDependency; import org.gradle.api.artifacts.component.ProjectComponentIdentifier; import org.gradle.api.attributes.Category; +import org.gradle.api.initialization.IncludedBuild; +import org.gradle.api.internal.artifacts.dependencies.DefaultDependencyArtifact; import org.gradle.api.internal.artifacts.dependencies.DefaultExternalModuleDependency; import org.gradle.api.plugins.Convention; import org.gradle.api.plugins.JavaPlugin; @@ -32,6 +46,7 @@ import org.gradle.tooling.provider.model.ParameterizedToolingModelBuilder; import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.model.AppArtifactKey; import io.quarkus.bootstrap.resolver.model.ArtifactCoords; import io.quarkus.bootstrap.resolver.model.Dependency; import io.quarkus.bootstrap.resolver.model.ModelParameter; @@ -45,12 +60,25 @@ import io.quarkus.bootstrap.resolver.model.impl.WorkspaceImpl; import io.quarkus.bootstrap.resolver.model.impl.WorkspaceModuleImpl; import io.quarkus.bootstrap.util.QuarkusModelHelper; +import io.quarkus.gradle.QuarkusPlugin; import io.quarkus.gradle.tasks.QuarkusGradleUtils; import io.quarkus.runtime.LaunchMode; +import io.quarkus.runtime.util.HashUtil; + +public class QuarkusModelBuilder implements ParameterizedToolingModelBuilder { -public class QuarkusModelBuilder implements ParameterizedToolingModelBuilder { + private static final String MAIN_RESOURCES_OUTPUT = "build/resources/main"; + private static final String CLASSES_OUTPUT = "build/classes"; - private static final List scannedConfigurations = new LinkedList(); + private static Configuration classpathConfig(Project project, LaunchMode mode) { + if (LaunchMode.TEST.equals(mode)) { + return 
project.getConfigurations().getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME); + } + if (LaunchMode.DEVELOPMENT.equals(mode)) { + return project.getConfigurations().getByName(QuarkusPlugin.DEV_MODE_CONFIGURATION_NAME); + } + return project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME); + } @Override public boolean canBuild(String modelName) { @@ -58,7 +86,7 @@ public boolean canBuild(String modelName) { } @Override - public Class getParameterType() { + public Class getParameterType() { return ModelParameter.class; } @@ -70,109 +98,187 @@ public Object buildAll(String modelName, Project project) { } @Override - public Object buildAll(String modelName, Object parameter, Project project) { - LaunchMode mode = LaunchMode.valueOf(((ModelParameter) parameter).getMode()); + public Object buildAll(String modelName, ModelParameter parameter, Project project) { + LaunchMode mode = LaunchMode.valueOf(parameter.getMode()); - if (LaunchMode.TEST.equals(mode)) { - scannedConfigurations.add(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME); - } else { - scannedConfigurations.add(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME); - } + final List deploymentDeps = getEnforcedPlatforms(project); - if (LaunchMode.DEVELOPMENT.equals(mode)) { - scannedConfigurations.add(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME); - } + final Map platformProperties = resolvePlatformProperties(project, deploymentDeps); - final Collection directExtensionDependencies = getEnforcedPlatforms(project); + final Map appDependencies = new LinkedHashMap<>(); + final Set visitedDeps = new HashSet<>(); - final Map appDependencies = new HashMap<>(); - for (String configurationName : scannedConfigurations) { - final ResolvedConfiguration configuration = project.getConfigurations().getByName(configurationName) - .getResolvedConfiguration(); - appDependencies.putAll(collectDependencies(configuration, configurationName, mode, project)); - directExtensionDependencies - .addAll(getDirectExtensionDependencies(configuration.getFirstLevelModuleDependencies(), appDependencies, - new HashSet<>())); - } + final ResolvedConfiguration resolvedConfiguration = classpathConfig(project, mode).getResolvedConfiguration(); + collectDependencies(resolvedConfiguration, mode, project, appDependencies); + collectFirstMetDeploymentDeps(resolvedConfiguration.getFirstLevelModuleDependencies(), appDependencies, + deploymentDeps, visitedDeps); - final Set extensionDependencies = collectExtensionDependencies(project, directExtensionDependencies); + final List extensionDependencies = collectExtensionDependencies(project, deploymentDeps); ArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(), project.getVersion().toString()); - return new QuarkusModelImpl(new WorkspaceImpl(appArtifactCoords, getWorkspace(project.getRootProject())), - new HashSet<>(appDependencies.values()), - extensionDependencies); + return new QuarkusModelImpl( + new WorkspaceImpl(appArtifactCoords, getWorkspace(project.getRootProject(), mode, appArtifactCoords)), + new LinkedList<>(appDependencies.values()), + extensionDependencies, + deploymentDeps.stream().map(QuarkusModelBuilder::toEnforcedPlatformDependency) + .filter(Objects::nonNull).collect(Collectors.toList()), + platformProperties); } - public Set getWorkspace(Project project) { + private Map resolvePlatformProperties(Project project, + List deploymentDeps) { + final Configuration boms = project.getConfigurations() + 
.detachedConfiguration(deploymentDeps.toArray(new org.gradle.api.artifacts.Dependency[0])); + final Map platformProps = new HashMap<>(); + final Set descriptorKeys = new HashSet<>(4); + final Set propertyKeys = new HashSet<>(2); + boms.getResolutionStrategy().eachDependency(d -> { + final String group = d.getTarget().getGroup(); + final String name = d.getTarget().getName(); + if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) { + descriptorKeys.add(new AppArtifactKey(group, + name.substring(0, name.length() - BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX.length()), + d.getTarget().getVersion())); + } else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) { + final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); + dep.setExtension("properties"); + dep.setType("properties"); + dep.setName(name); + + final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency( + group, name, d.getTarget().getVersion(), null); + gradleDep.addArtifact(dep); + + for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep) + .getResolvedConfiguration().getResolvedArtifacts()) { + if (a.getName().equals(name)) { + final Properties props = new Properties(); + try (InputStream is = new FileInputStream(a.getFile())) { + props.load(is); + } catch (IOException e) { + throw new GradleException("Failed to read properties from " + a.getFile(), e); + } + for (Map.Entry prop : props.entrySet()) { + final String propName = String.valueOf(prop.getKey()); + if (propName.startsWith(BootstrapConstants.PLATFORM_PROPERTY_PREFIX)) { + platformProps.put(propName, String.valueOf(prop.getValue())); + } + } + break; + } + } + propertyKeys.add(new AppArtifactKey(group, + name.substring(0, name.length() - BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX.length()), + d.getTarget().getVersion())); + } + + }); + boms.getResolvedConfiguration(); + if (!descriptorKeys.containsAll(propertyKeys)) { + final StringBuilder buf = new StringBuilder(); + buf.append( + "The Quarkus platform properties applied to the project are missing the corresponding Quarkus platform BOM imports:"); + final int l = buf.length(); + for (AppArtifactKey key : propertyKeys) { + if (!descriptorKeys.contains(key)) { + if (l - buf.length() < 0) { + buf.append(','); + } + buf.append(' ').append(key); + } + } + throw new GradleException(buf.toString()); + } + return platformProps; + } + + public Set getWorkspace(Project project, LaunchMode mode, ArtifactCoords mainModuleCoord) { Set modules = new HashSet<>(); for (Project subproject : project.getAllprojects()) { final Convention convention = subproject.getConvention(); JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class); - if (javaConvention == null) { + if (javaConvention == null || !javaConvention.getSourceSets().getNames().contains(SourceSet.MAIN_SOURCE_SET_NAME)) { continue; } - modules.add(getWorkspaceModule(subproject)); + if (subproject.getName().equals(mainModuleCoord.getArtifactId()) + && subproject.getGroup().equals(mainModuleCoord.getGroupId())) { + modules.add(getWorkspaceModule(subproject, mode, true)); + } else { + modules.add(getWorkspaceModule(subproject, mode, false)); + } + } return modules; } - private WorkspaceModule getWorkspaceModule(Project project) { + private WorkspaceModule getWorkspaceModule(Project project, LaunchMode mode, boolean isMainModule) { ArtifactCoords appArtifactCoords = new 
ArtifactCoordsImpl(project.getGroup().toString(), project.getName(), project.getVersion().toString()); final SourceSet mainSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME); - - return new WorkspaceModuleImpl(appArtifactCoords, project.getProjectDir().getAbsoluteFile(), - project.getBuildDir().getAbsoluteFile(), getSourceSourceSet(mainSourceSet), convert(mainSourceSet)); + final SourceSetImpl modelSourceSet = convert(mainSourceSet); + WorkspaceModuleImpl workspaceModule = new WorkspaceModuleImpl(appArtifactCoords, + project.getProjectDir().getAbsoluteFile(), + project.getBuildDir().getAbsoluteFile(), getSourceSourceSet(mainSourceSet), modelSourceSet); + if (isMainModule && mode == LaunchMode.TEST) { + final SourceSet testSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.TEST_SOURCE_SET_NAME); + workspaceModule.getSourceSet().getSourceDirectories().addAll(testSourceSet.getOutput().getClassesDirs().getFiles()); + } + return workspaceModule; } - private Set getEnforcedPlatforms(Project project) { - final Set directExtension = new HashSet<>(); + private List getEnforcedPlatforms(Project project) { + final List directExtension = new ArrayList<>(); // collect enforced platforms final Configuration impl = project.getConfigurations() .getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); + for (org.gradle.api.artifacts.Dependency d : impl.getAllDependencies()) { if (!(d instanceof ModuleDependency)) { continue; } final ModuleDependency module = (ModuleDependency) d; final Category category = module.getAttributes().getAttribute(Category.CATEGORY_ATTRIBUTE); - if (category != null && Category.ENFORCED_PLATFORM.equals(category.getName())) { + if (category != null && (Category.ENFORCED_PLATFORM.equals(category.getName()) + || Category.REGULAR_PLATFORM.equals(category.getName()))) { directExtension.add(d); } } return directExtension; } - private Set getDirectExtensionDependencies(Set dependencies, - Map appDependencies, Set visited) { - Set extensions = new HashSet<>(); + private void collectFirstMetDeploymentDeps(Set dependencies, + Map appDependencies, List extensionDeps, + Set visited) { for (ResolvedDependency d : dependencies) { - ArtifactCoords key = new ArtifactCoordsImpl(d.getModuleGroup(), d.getModuleName(), ""); - if (!visited.add(key)) { - continue; - } + boolean addChildExtension = true; + boolean wasDependencyVisited = false; + for (ResolvedArtifact artifact : d.getModuleArtifacts()) { + ModuleVersionIdentifier moduleIdentifier = artifact.getModuleVersion().getId(); + ArtifactCoords key = toAppDependenciesKey(moduleIdentifier.getGroup(), moduleIdentifier.getName(), + artifact.getClassifier()); + if (!visited.add(key)) { + continue; + } - Dependency appDep = appDependencies.get(key); - if (appDep == null) { - continue; - } - final org.gradle.api.artifacts.Dependency deploymentArtifact = getDeploymentArtifact(appDep); + Dependency appDep = appDependencies.get(key); + if (appDep == null) { + continue; + } - boolean addChildExtension = true; - if (deploymentArtifact != null && addChildExtension) { - extensions.add(deploymentArtifact); - addChildExtension = false; + wasDependencyVisited = true; + final org.gradle.api.artifacts.Dependency deploymentArtifact = getDeploymentArtifact(appDep); + if (deploymentArtifact != null) { + extensionDeps.add(deploymentArtifact); + addChildExtension = false; + } } - final Set resolvedChildren = d.getChildren(); - if (addChildExtension && !resolvedChildren.isEmpty()) { - extensions - 
.addAll(getDirectExtensionDependencies(resolvedChildren, appDependencies, visited)); + if (wasDependencyVisited && addChildExtension && !resolvedChildren.isEmpty()) { + collectFirstMetDeploymentDeps(resolvedChildren, appDependencies, extensionDeps, visited); } } - return extensions; } private org.gradle.api.artifacts.Dependency getDeploymentArtifact(Dependency dependency) { @@ -208,9 +314,9 @@ private org.gradle.api.artifacts.Dependency getDeploymentArtifact(Dependency dep return null; } - private Set collectExtensionDependencies(Project project, + private List collectExtensionDependencies(Project project, Collection extensions) { - final Set platformDependencies = new HashSet<>(); + final List platformDependencies = new LinkedList<>(); final Configuration deploymentConfig = project.getConfigurations() .detachedConfiguration(extensions.toArray(new org.gradle.api.artifacts.Dependency[0])); @@ -220,41 +326,145 @@ private Set collectExtensionDependencies(Project project, continue; } - final Dependency dependency = toDependency(a, deploymentConfig.getName()); + final Dependency dependency = toDependency(a); platformDependencies.add(dependency); } return platformDependencies; } - private Map collectDependencies(ResolvedConfiguration configuration, String configurationName, - LaunchMode mode, Project project) { - Map modelDependencies = new HashMap<>(); - for (ResolvedArtifact a : configuration.getResolvedArtifacts()) { + private void collectDependencies(ResolvedConfiguration configuration, + LaunchMode mode, Project project, Map appDependencies) { + + final Set artifacts = configuration.getResolvedArtifacts(); + Set artifactFiles = null; + + // if the number of artifacts is less than the number of files then probably + // the project includes direct file dependencies + if (artifacts.size() < configuration.getFiles().size()) { + artifactFiles = new HashSet<>(artifacts.size()); + } + for (ResolvedArtifact a : artifacts) { if (!isDependency(a)) { continue; } - Dependency dep; + final DependencyImpl dep = initDependency(a); if (LaunchMode.DEVELOPMENT.equals(mode) && a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) { - Project projectDep = project.getRootProject() - .findProject(((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath()); - - dep = toDependency(a, configurationName, projectDep); + IncludedBuild includedBuild = includedBuild(project, a.getName()); + if (includedBuild != null) { + addSubstitutedProject(dep, includedBuild.getProjectDir()); + } else { + Project projectDep = project.getRootProject() + .findProject(((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath()); + addDevModePaths(dep, a, projectDep); + } } else { - dep = toDependency(a, configurationName); + dep.addPath(a.getFile()); + } + appDependencies.put(toAppDependenciesKey(dep.getGroupId(), dep.getName(), dep.getClassifier()), dep); + if (artifactFiles != null) { + artifactFiles.add(a.getFile()); } - ArtifactCoords gaKey = new ArtifactCoordsImpl(dep.getGroupId(), dep.getName(), ""); - modelDependencies.put(gaKey, dep); } - return modelDependencies; + if (artifactFiles != null) { + // detect FS paths that aren't provided by the resolved artifacts + for (File f : configuration.getFiles()) { + if (artifactFiles.contains(f)) { + continue; + } + // here we are trying to represent a direct FS path dependency + // as an artifact dependency + // SHA1 hash is used to avoid long file names in the lib dir + final String parentPath = f.getParent(); + final 
String group = HashUtil.sha1(parentPath == null ? f.getName() : parentPath); + String name = f.getName(); + String type = "jar"; + if (!f.isDirectory()) { + final int dot = f.getName().lastIndexOf('.'); + if (dot > 0) { + name = f.getName().substring(0, dot); + type = f.getName().substring(dot + 1); + } + } + // hash could be a better way to represent the version + final String version = String.valueOf(f.lastModified()); + final ArtifactCoords key = toAppDependenciesKey(group, name, ""); + final DependencyImpl dep = new DependencyImpl(name, group, version, "compile", type, null); + dep.addPath(f); + appDependencies.put(key, dep); + } + } + } + + private void addDevModePaths(final DependencyImpl dep, ResolvedArtifact a, Project project) { + final JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class); + if (javaConvention == null) { + dep.addPath(a.getFile()); + return; + } + final SourceSet mainSourceSet = javaConvention.getSourceSets().findByName(SourceSet.MAIN_SOURCE_SET_NAME); + if (mainSourceSet == null) { + dep.addPath(a.getFile()); + return; + } + + final File classesDir = new File(QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false)); + if (classesDir.exists()) { + dep.addPath(classesDir); + } + for (File resourcesDir : mainSourceSet.getResources().getSourceDirectories()) { + if (resourcesDir.exists()) { + dep.addPath(resourcesDir); + } + } + final Task resourcesTask = project.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME); + for (File outputDir : resourcesTask.getOutputs().getFiles()) { + if (outputDir.exists()) { + dep.addPath(outputDir); + } + } + } + + private void addSubstitutedProject(final DependencyImpl dep, File projectFile) { + File mainResourceDirectory = new File(projectFile, MAIN_RESOURCES_OUTPUT); + if (mainResourceDirectory.exists()) { + dep.addPath(mainResourceDirectory); + } + File classesOutput = new File(projectFile, CLASSES_OUTPUT); + File[] languageDirectories = classesOutput.listFiles(); + if (languageDirectories == null) { + throw new GradleException( + "The project does not contain a class output directory. 
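For a direct file-system dependency (a file on the configuration's classpath with no resolved artifact behind it), the block above fabricates artifact coordinates: the SHA-1 of the parent directory becomes the group, the file name minus its extension becomes the artifact name, and the last-modified timestamp stands in for a version. A small illustration of the values that would be produced, using a made-up path:

```java
// Illustration only (path and values are made up): the synthetic coordinates derived
// for a direct file dependency that has no resolved artifact behind it.
import java.io.File;

import io.quarkus.runtime.util.HashUtil;

public class FlatFileCoordsExample {

    public static void main(String[] args) {
        File f = new File("/home/dev/app/libs/acme-util.jar"); // hypothetical flat-dir jar
        String group = HashUtil.sha1(f.getParent());           // SHA-1 of "/home/dev/app/libs" becomes the groupId
        String fileName = f.getName();                         // "acme-util.jar"
        int dot = fileName.lastIndexOf('.');
        String name = dot > 0 ? fileName.substring(0, dot) : fileName; // "acme-util"
        String type = dot > 0 ? fileName.substring(dot + 1) : "jar";   // "jar"
        String version = String.valueOf(f.lastModified());     // last-modified timestamp stands in for a version
        System.out.println(group + ":" + name + ":" + version + " (" + type + ")");
    }
}
```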
" + classesOutput.getPath() + " must exist."); + } + for (File languageDirectory : languageDirectories) { + if (languageDirectory.isDirectory()) { + for (File sourceSet : languageDirectory.listFiles()) { + if (sourceSet.isDirectory() && sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME)) { + dep.addPath(sourceSet); + } + } + } + } + } + + private IncludedBuild includedBuild(final Project project, final String projectName) { + try { + return project.getGradle().includedBuild(projectName); + } catch (UnknownDomainObjectException ignore) { + return null; + } } private SourceSetImpl convert(SourceSet sourceSet) { + if (sourceSet.getOutput().getResourcesDir().exists()) { + return new SourceSetImpl( + sourceSet.getOutput().getClassesDirs().getFiles(), + sourceSet.getOutput().getResourcesDir()); + } return new SourceSetImpl( - sourceSet.getOutput().getClassesDirs().getFiles(), - sourceSet.getOutput().getResourcesDir()); + sourceSet.getOutput().getClassesDirs().getFiles()); } private io.quarkus.bootstrap.resolver.model.SourceSet getSourceSourceSet(SourceSet sourceSet) { @@ -267,33 +477,39 @@ private static boolean isDependency(ResolvedArtifact a) { a.getFile().isDirectory(); } - private DependencyImpl toDependency(ResolvedArtifact a, String configuration, Project project) { - final String[] split = a.getModuleVersion().toString().split(":"); - final DependencyImpl dependency = new DependencyImpl(split[1], split[0], split.length > 2 ? split[2] : null, - configuration, a.getType(), a.getClassifier()); - final JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class); - if (javaConvention != null) { - SourceSet mainSourceSet = javaConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME); - final File classesDir = new File(QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false)); - if (classesDir.exists()) { - dependency.addPath(classesDir); - } - for (File resourcesDir : mainSourceSet.getResources().getSourceDirectories()) { - if (resourcesDir.exists()) { - dependency.addPath(resourcesDir); - } + /** + * Creates an instance of Dependency and associates it with the ResolvedArtifact's path + */ + static Dependency toDependency(ResolvedArtifact a) { + final DependencyImpl dependency = initDependency(a); + dependency.addPath(a.getFile()); + return dependency; + } + + static Dependency toEnforcedPlatformDependency(org.gradle.api.artifacts.Dependency dependency) { + if (dependency instanceof ExternalModuleDependency) { + ExternalModuleDependency emd = (ExternalModuleDependency) dependency; + Category category = emd.getAttributes().getAttribute(Category.CATEGORY_ATTRIBUTE); + if (category != null && Category.ENFORCED_PLATFORM.equals(category.getName())) { + return new DependencyImpl(emd.getName(), emd.getGroup(), emd.getVersion(), + "compile", "pom", null); } } - return dependency; + return null; } - static DependencyImpl toDependency(ResolvedArtifact a, String configuration) { + /** + * Creates an instance of DependencyImpl but does not associates it with a path + */ + private static DependencyImpl initDependency(ResolvedArtifact a) { final String[] split = a.getModuleVersion().toString().split(":"); - - final DependencyImpl dependency = new DependencyImpl(split[1], split[0], split.length > 2 ? split[2] : null, - configuration, a.getType(), a.getClassifier()); - dependency.addPath(a.getFile()); - return dependency; + return new DependencyImpl(split[1], split[0], split.length > 2 ? 
split[2] : null, + "compile", a.getType(), a.getClassifier()); } + private static ArtifactCoords toAppDependenciesKey(String groupId, String artifactId, String classifier) { + // Default classifier is empty string and not null value, lets keep it that way + classifier = classifier == null ? "" : classifier; + return new ArtifactCoordsImpl(groupId, artifactId, classifier, "", TYPE_JAR); + } } diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java b/devtools/gradle/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java new file mode 100644 index 0000000000000..e350e49532903 --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java @@ -0,0 +1,255 @@ +package io.quarkus.gradle.extension; + +import java.io.File; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.StringJoiner; +import java.util.stream.Collectors; + +import org.gradle.api.Action; +import org.gradle.api.Project; +import org.gradle.api.file.FileCollection; +import org.gradle.api.file.RegularFile; +import org.gradle.api.plugins.Convention; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginConvention; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.testing.Test; +import org.gradle.jvm.tasks.Jar; + +import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppModel; +import io.quarkus.bootstrap.resolver.AppModelResolver; +import io.quarkus.bootstrap.resolver.model.ModelParameter; +import io.quarkus.bootstrap.resolver.model.QuarkusModel; +import io.quarkus.bootstrap.resolver.model.impl.ModelParameterImpl; +import io.quarkus.gradle.AppModelGradleResolver; +import io.quarkus.gradle.builder.QuarkusModelBuilder; +import io.quarkus.gradle.tasks.QuarkusGradleUtils; +import io.quarkus.runtime.LaunchMode; + +public class QuarkusPluginExtension { + + private final Project project; + + private File outputDirectory; + + private String finalName; + + private File sourceDir; + + private File workingDir; + + private File outputConfigDirectory; + + private final SourceSetExtension sourceSetExtension; + + public QuarkusPluginExtension(Project project) { + this.project = project; + this.sourceSetExtension = new SourceSetExtension(); + } + + public void beforeTest(Test task) { + try { + final Map props = task.getSystemProperties(); + + final AppModel appModel = getAppModelResolver(LaunchMode.TEST) + .resolveModel(getAppArtifact()); + final Path serializedModel = QuarkusGradleUtils.serializeAppModel(appModel, task); + props.put(BootstrapConstants.SERIALIZED_APP_MODEL, serializedModel.toString()); + + StringJoiner outputSourcesDir = new StringJoiner(","); + for (File outputSourceDir : combinedOutputSourceDirs()) { + outputSourcesDir.add(outputSourceDir.getAbsolutePath()); + } + props.put(BootstrapConstants.OUTPUT_SOURCES_DIR, outputSourcesDir.toString()); + + // Identify the folder containing the sources associated with this test task + String fileList = getSourceSets().stream() + .filter(sourceSet -> Objects.equals( + task.getTestClassesDirs().getAsPath(), + sourceSet.getOutput().getClassesDirs().getAsPath())) + .flatMap(sourceSet -> sourceSet.getOutput().getClassesDirs().getFiles().stream()) + 
.filter(File::exists) + .distinct() + .map(testSrcDir -> String.format("%s:%s", + project.relativePath(testSrcDir), + project.relativePath(outputDirectory()))) + .collect(Collectors.joining(",")); + task.environment(BootstrapConstants.TEST_TO_MAIN_MAPPINGS, fileList); + + final String nativeRunner = task.getProject().getBuildDir().toPath().resolve(finalName() + "-runner") + .toAbsolutePath() + .toString(); + props.put("native.image.path", nativeRunner); + } catch (Exception e) { + throw new IllegalStateException("Failed to resolve deployment classpath", e); + } + } + + public Path appJarOrClasses() { + final Jar jarTask = (Jar) project.getTasks().findByName(JavaPlugin.JAR_TASK_NAME); + if (jarTask == null) { + throw new RuntimeException("Failed to locate task 'jar' in the project."); + } + final Provider jarProvider = jarTask.getArchiveFile(); + Path classesDir = null; + if (jarProvider.isPresent()) { + final File f = jarProvider.get().getAsFile(); + if (f.exists()) { + classesDir = f.toPath(); + } + } + if (classesDir == null) { + final Convention convention = project.getConvention(); + JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class); + if (javaConvention != null) { + final SourceSet mainSourceSet = javaConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME); + final String classesPath = QuarkusGradleUtils.getClassesDir(mainSourceSet, jarTask.getTemporaryDir()); + if (classesPath != null) { + classesDir = Paths.get(classesPath); + } + } + } + if (classesDir == null) { + throw new RuntimeException("Failed to locate project's classes directory"); + } + return classesDir; + } + + public File outputDirectory() { + if (outputDirectory == null) { + outputDirectory = getLastFile(getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getOutput() + .getClassesDirs()); + } + return outputDirectory; + } + + public void setOutputDirectory(String outputDirectory) { + this.outputDirectory = new File(outputDirectory); + } + + public File outputConfigDirectory() { + if (outputConfigDirectory == null) { + outputConfigDirectory = getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getOutput() + .getResourcesDir(); + } + return outputConfigDirectory; + } + + public void setOutputConfigDirectory(String outputConfigDirectory) { + this.outputConfigDirectory = new File(outputConfigDirectory); + } + + public File sourceDir() { + if (sourceDir == null) { + sourceDir = getLastFile(getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getAllJava() + .getSourceDirectories()); + } + return sourceDir; + } + + public void setSourceDir(String sourceDir) { + this.sourceDir = new File(sourceDir); + } + + public File workingDir() { + if (workingDir == null) { + workingDir = outputDirectory(); + } + return workingDir; + } + + public void setWorkingDir(String workingDir) { + this.workingDir = new File(workingDir); + } + + public String finalName() { + if (finalName == null || finalName.length() == 0) { + this.finalName = String.format("%s-%s", project.getName(), project.getVersion()); + } + return finalName; + } + + public void setFinalName(String finalName) { + this.finalName = finalName; + } + + public void sourceSets(Action action) { + action.execute(this.sourceSetExtension); + } + + public SourceSetExtension sourceSetExtension() { + return sourceSetExtension; + } + + public Set resourcesDir() { + return getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getResources().getSrcDirs(); + } + + public Set combinedOutputSourceDirs() { + Set sourcesDirs = 
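beforeTest() above pushes the serialized application model and the native runner location into the Test task as system properties, and exports the test-to-main mapping as an environment variable. A minimal sketch (an assumption, not taken from this patch) of how test-side code could read those values back:

```java
// Sketch only: reading back what beforeTest() wires into the Test task. The constant names
// are the ones used above; the printing is purely illustrative.
import java.nio.file.Path;
import java.nio.file.Paths;

import io.quarkus.bootstrap.BootstrapConstants;

public class TestWiringExample {

    public static void main(String[] args) {
        Path serializedAppModel = Paths.get(System.getProperty(BootstrapConstants.SERIALIZED_APP_MODEL));
        String outputSourcesDirs = System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR);
        String testToMainMappings = System.getenv(BootstrapConstants.TEST_TO_MAIN_MAPPINGS);
        String nativeRunner = System.getProperty("native.image.path"); // used by native image tests
        System.out.println(serializedAppModel + " | " + outputSourcesDirs
                + " | " + testToMainMappings + " | " + nativeRunner);
    }
}
```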
new LinkedHashSet<>(); + sourcesDirs.addAll(getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getOutput().getClassesDirs().getFiles()); + sourcesDirs.addAll(getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME).getOutput().getClassesDirs().getFiles()); + return sourcesDirs; + } + + public AppArtifact getAppArtifact() { + return new AppArtifact(project.getGroup().toString(), project.getName(), + project.getVersion().toString()); + } + + public AppModelResolver getAppModelResolver() { + return getAppModelResolver(LaunchMode.NORMAL); + } + + public AppModelResolver getAppModelResolver(LaunchMode mode) { + return new AppModelGradleResolver(project, getQuarkusModel(mode)); + } + + public QuarkusModel getQuarkusModel() { + return getQuarkusModel(LaunchMode.NORMAL); + } + + public QuarkusModel getQuarkusModel(LaunchMode mode) { + return create(project, mode); + } + + private QuarkusModel create(Project project, LaunchMode mode) { + QuarkusModelBuilder builder = new QuarkusModelBuilder(); + ModelParameter params = new ModelParameterImpl(); + params.setMode(mode.toString()); + return (QuarkusModel) builder.buildAll(QuarkusModel.class.getName(), params, project); + } + + /** + * Returns the last file from the specified {@link FileCollection}. + * Needed for the Scala plugin. + */ + private File getLastFile(FileCollection fileCollection) { + File result = null; + for (File f : fileCollection) { + if (result == null || f.exists()) { + result = f; + } + } + return result; + } + + /** + * Convenience method to get the source sets associated with the current project. + * + * @return the source sets associated with the current project. + */ + private SourceSetContainer getSourceSets() { + return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); + } + +} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/extension/SourceSetExtension.java b/devtools/gradle/src/main/java/io/quarkus/gradle/extension/SourceSetExtension.java new file mode 100644 index 0000000000000..7685ddb1c4eaa --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/extension/SourceSetExtension.java @@ -0,0 +1,16 @@ +package io.quarkus.gradle.extension; + +import org.gradle.api.tasks.SourceSet; + +public class SourceSetExtension { + + private SourceSet extraNativeTestSourceSet; + + public SourceSet extraNativeTest() { + return extraNativeTestSourceSet; + } + + public void setExtraNativeTest(SourceSet extraNativeTestSourceSet) { + this.extraNativeTestSourceSet = extraNativeTestSourceSet; + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleDevModeLauncher.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleDevModeLauncher.java new file mode 100644 index 0000000000000..b93d03da8cd1a --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleDevModeLauncher.java @@ -0,0 +1,51 @@ +package io.quarkus.gradle.tasks; + +import org.gradle.api.logging.Logger; + +import io.quarkus.deployment.dev.QuarkusDevModeLauncher; + +public class GradleDevModeLauncher extends QuarkusDevModeLauncher { + + /** + * Initializes the launcher builder + * + * @param logger the logger + * @return launcher builder + */ + public static Builder builder(Logger logger) { + return new GradleDevModeLauncher(logger).new Builder(); + } + + public class Builder extends QuarkusDevModeLauncher.Builder { + + private Builder() { + super(null); + } + } + + private final Logger logger; + + private GradleDevModeLauncher(Logger logger) { + this.logger = logger; + } + 
+ @Override + protected boolean isDebugEnabled() { + return logger.isDebugEnabled(); + } + + @Override + protected void debug(Object msg) { + logger.warn(msg == null ? "null" : msg.toString()); + } + + @Override + protected void error(Object msg) { + logger.error(msg == null ? "null" : msg.toString()); + } + + @Override + protected void warn(Object msg) { + logger.warn(msg == null ? "null" : msg.toString()); + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleMessageWriter.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleMessageWriter.java index 00deba251f42e..50b090a93350a 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleMessageWriter.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/GradleMessageWriter.java @@ -2,7 +2,7 @@ import org.gradle.api.logging.Logger; -import io.quarkus.platform.tools.MessageWriter; +import io.quarkus.devtools.messagewriter.MessageWriter; public class GradleMessageWriter implements MessageWriter { diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusAddExtension.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusAddExtension.java index cab182f13f01e..2639c3cdfbbc2 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusAddExtension.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusAddExtension.java @@ -1,21 +1,17 @@ package io.quarkus.gradle.tasks; import static java.util.Arrays.stream; -import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toSet; -import java.net.URL; import java.util.List; import java.util.Set; import org.gradle.api.GradleException; import org.gradle.api.tasks.Input; -import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.TaskAction; import org.gradle.api.tasks.options.Option; import io.quarkus.devtools.commands.AddExtensions; -import io.quarkus.registry.DefaultExtensionRegistry; public class QuarkusAddExtension extends QuarkusPlatformTask { @@ -24,7 +20,6 @@ public QuarkusAddExtension() { } private List extensionsToAdd; - private List registries; @Option(option = "extensions", description = "Configures the extensions to be added.") public void setExtensionsToAdd(List extensionsToAdd) { @@ -36,17 +31,6 @@ public List getExtensionsToAdd() { return extensionsToAdd; } - @Optional - @Input - public List getRegistries() { - return registries; - } - - @Option(description = "The extension registry URLs to be used", option = "registry") - public void setRegistries(List registry) { - this.registries = registry; - } - @TaskAction public void addExtension() { Set extensionsSet = getExtensionsToAdd() @@ -56,14 +40,8 @@ public void addExtension() { .collect(toSet()); try { - AddExtensions addExtensions = new AddExtensions(getQuarkusProject()) + AddExtensions addExtensions = new AddExtensions(getQuarkusProject(false)) .extensions(extensionsSet); - if (registries != null && !registries.isEmpty()) { - List urls = registries.stream() - .map(QuarkusAddExtension::toURL) - .collect(toList()); - addExtensions.extensionRegistry(DefaultExtensionRegistry.fromURLs(urls)); - } addExtensions.execute(); } catch (Exception e) { throw new GradleException("Failed to add extensions " + getExtensionsToAdd(), e); diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusBuild.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusBuild.java index 47318d44ce740..76e7231fab909 100644 --- 
a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusBuild.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusBuild.java @@ -1,6 +1,7 @@ package io.quarkus.gradle.tasks; import java.io.File; +import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -14,6 +15,7 @@ import org.gradle.api.tasks.Classpath; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.Optional; +import org.gradle.api.tasks.OutputDirectory; import org.gradle.api.tasks.OutputFile; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.TaskAction; @@ -30,8 +32,6 @@ public class QuarkusBuild extends QuarkusTask { private static final String NATIVE_PROPERTY_NAMESPACE = "quarkus.native"; - private boolean uberJar; - private List ignoredEntries = new ArrayList<>(); public QuarkusBuild() { @@ -47,16 +47,6 @@ public QuarkusBuild nativeArgs(Action> action) { return this; } - @Input - public boolean isUberJar() { - return uberJar; - } - - @Option(description = "Set to true if the build task should build an uberjar", option = "uber-jar") - public void setUberJar(boolean uberJar) { - this.uberJar = uberJar; - } - @Optional @Input public List getIgnoredEntries() { @@ -77,14 +67,47 @@ public FileCollection getClasspath() { .plus(mainSourceSet.getResources()); } + @Input + public Map getQuarkusBuildSystemProperties() { + Map quarkusSystemProperties = new HashMap<>(); + for (Map.Entry systemProperty : System.getProperties().entrySet()) { + if (systemProperty.getKey().toString().startsWith("quarkus.") && + systemProperty.getValue() instanceof Serializable) { + quarkusSystemProperties.put(systemProperty.getKey(), systemProperty.getValue()); + } + } + return quarkusSystemProperties; + } + + @Input + public Map getQuarkusBuildEnvProperties() { + Map quarkusEnvProperties = new HashMap<>(); + for (Map.Entry systemProperty : System.getenv().entrySet()) { + if (systemProperty.getKey() != null && systemProperty.getKey().startsWith("QUARKUS_")) { + quarkusEnvProperties.put(systemProperty.getKey(), systemProperty.getValue()); + } + } + return quarkusEnvProperties; + } + @OutputFile - public File getOutputDir() { + public File getRunnerJar() { return new File(getProject().getBuildDir(), extension().finalName() + "-runner.jar"); } + @OutputFile + public File getNativeRunner() { + return new File(getProject().getBuildDir(), extension().finalName() + "-runner"); + } + + @OutputDirectory + public File getFastJar() { + return new File(getProject().getBuildDir(), "quarkus-app"); + } + @TaskAction public void buildQuarkus() { - getLogger().lifecycle("building quarkus runner"); + getLogger().lifecycle("building quarkus jar"); final AppArtifact appArtifact = extension().getAppArtifact(); appArtifact.setPaths(QuarkusGradleUtils.getOutputPaths(getProject())); @@ -95,11 +118,6 @@ public void buildQuarkus() { String joinedEntries = String.join(",", ignoredEntries); effectiveProperties.setProperty("quarkus.package.user-configured-ignored-entries", joinedEntries); } - boolean clear = false; - if (uberJar && System.getProperty("quarkus.package.uber-jar") == null) { - System.setProperty("quarkus.package.uber-jar", "true"); - clear = true; - } try (CuratedApplication appCreationContext = QuarkusBootstrap.builder() .setBaseClassLoader(getClass().getClassLoader()) .setAppModelResolver(modelResolver) @@ -122,10 +140,6 @@ public void buildQuarkus() { } catch (BootstrapException e) { throw new GradleException("Failed to build a runnable JAR", e); - } finally { - 
if (clear) { - System.clearProperty("quarkus.package.uber-jar"); - } } } diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusDev.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusDev.java index a878eb3d76519..d8c94a0a74494 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusDev.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusDev.java @@ -1,31 +1,18 @@ package io.quarkus.gradle.tasks; -import java.io.ByteArrayOutputStream; -import java.io.DataOutputStream; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.ObjectOutputStream; -import java.net.InetAddress; -import java.net.Socket; -import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedList; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; -import java.util.jar.Attributes; -import java.util.jar.Manifest; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; import javax.inject.Inject; @@ -55,10 +42,8 @@ import io.quarkus.bootstrap.resolver.AppModelResolver; import io.quarkus.bootstrap.resolver.AppModelResolverException; import io.quarkus.deployment.dev.DevModeContext; -import io.quarkus.deployment.dev.DevModeMain; -import io.quarkus.gradle.QuarkusPluginExtension; +import io.quarkus.deployment.dev.QuarkusDevModeLauncher; import io.quarkus.runtime.LaunchMode; -import io.quarkus.utilities.JavaBinFinder; public class QuarkusDev extends QuarkusTask { @@ -181,9 +166,6 @@ public void setCompilerArgs(List compilerArgs) { @TaskAction public void startDev() { - Project project = getProject(); - QuarkusPluginExtension extension = (QuarkusPluginExtension) project.getExtensions().findByName("quarkus"); - if (!getSourceDir().isDirectory()) { throw new GradleException("The `src/main/java` directory is required, please create it."); } @@ -194,205 +176,114 @@ public void startDev() { "Does the project have any source files?"); } - DevModeContext context = new DevModeContext(); - context.setProjectDir(project.getProjectDir()); - for (Map.Entry e : System.getProperties().entrySet()) { - context.getSystemProperties().put(e.getKey().toString(), (String) e.getValue()); + try { + QuarkusDevModeLauncher runner = newLauncher(); + getProject().exec(action -> { + action.commandLine(runner.args()).workingDir(getWorkingDir()); + action.setStandardInput(System.in) + .setErrorOutput(System.out) + .setStandardOutput(System.out); + }); + + } catch (Exception e) { + throw new GradleException("Failed to run", e); + } + } + + private QuarkusDevModeLauncher newLauncher() throws Exception { + final Project project = getProject(); + GradleDevModeLauncher.Builder builder = GradleDevModeLauncher.builder(getLogger()) + .preventnoverify(isPreventnoverify()) + .projectDir(project.getProjectDir()) + .buildDir(getBuildDir()) + .outputDir(getBuildDir()) + .debug(System.getProperty("debug")) + .debugHost(System.getProperty("debugHost", "localhost")) + .suspend(System.getProperty("suspend")); + + if (getJvmArgs() != null) { + builder.jvmArgs(getJvmArgs()); } + for (Map.Entry e : project.getProperties().entrySet()) { if (e.getValue() instanceof String) { - context.getBuildSystemProperties().put(e.getKey(), e.getValue().toString()); + builder.buildSystemProperty(e.getKey(), 
e.getValue().toString()); } } // this is a minor hack to allow ApplicationConfig to be populated with defaults - context.getBuildSystemProperties().putIfAbsent("quarkus.application.name", project.getName()); + builder.applicationName(project.getName()); if (project.getVersion() != null) { - context.getBuildSystemProperties().putIfAbsent("quarkus.application.version", project.getVersion().toString()); - } + builder.applicationVersion(project.getVersion().toString()); - context.setSourceEncoding(getSourceEncoding()); - try { - List args = new ArrayList<>(); - args.add(JavaBinFinder.findBin()); - final String debug = System.getProperty("debug"); - final String suspend = System.getProperty("suspend"); - String debugSuspend = "n"; - if (suspend != null) { - switch (suspend.toLowerCase(Locale.ENGLISH)) { - case "n": - case "false": { - debugSuspend = "n"; - break; - } - case "": - case "y": - case "true": { - debugSuspend = "y"; - break; - } - default: { - System.err.println( - "Ignoring invalid value \"" + suspend + "\" for \"suspend\" param and defaulting to \"n\""); - break; - } - } - } - if (debug == null) { - // debug mode not specified - // make sure 5005 is not used, we don't want to just fail if something else is using it - try (Socket socket = new Socket(InetAddress.getByAddress(new byte[] { 127, 0, 0, 1 }), 5005)) { - System.err.println("Port 5005 in use, not starting in debug mode"); - } catch (IOException e) { - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=0.0.0.0:5005,server=y,suspend=" + debugSuspend); - } - } else if (debug.toLowerCase().equals("client")) { - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=localhost:5005,server=n,suspend=" + debugSuspend); - } else if (debug.toLowerCase().equals("true") || debug.isEmpty()) { - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=0.0.0.0:5005,server=y,suspend=" + debugSuspend); - } else if (!debug.toLowerCase().equals("false")) { - try { - int port = Integer.parseInt(debug); - if (port <= 0) { - throw new GradleException("The specified debug port must be greater than 0"); - } - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=0.0.0.0:" + port + ",server=y,suspend=" + debugSuspend); - } catch (NumberFormatException e) { - throw new GradleException( - "Invalid value for debug parameter: " + debug + " must be true|false|client|{port}"); - } - } - if (getJvmArgs() != null) { - args.addAll(getJvmArgs()); - } - - // the following flags reduce startup time and are acceptable only for dev purposes - args.add("-XX:TieredStopAtLevel=1"); - if (!isPreventnoverify()) { - args.add("-Xverify:none"); - } - - //build a class-path string for the base platform - //this stuff does not change - // Do not include URIs in the manifest, because some JVMs do not like that - StringBuilder classPathManifest = new StringBuilder(); - - final AppModel appModel; - final AppModelResolver modelResolver = extension().getAppModelResolver(LaunchMode.DEVELOPMENT); - try { - final AppArtifact appArtifact = extension.getAppArtifact(); - appArtifact.setPaths(QuarkusGradleUtils.getOutputPaths(project)); - appModel = modelResolver.resolveModel(appArtifact); - } catch (AppModelResolverException e) { - throw new GradleException("Failed to resolve application model " + extension.getAppArtifact() + " dependencies", - e); - } - - args.add("-Djava.util.logging.manager=org.jboss.logmanager.LogManager"); - - final Set projectDependencies = new HashSet<>(); - addSelfWithLocalDeps(project, 
context, new HashSet<>(), projectDependencies, true); - - for (AppDependency appDependency : appModel.getFullDeploymentDeps()) { - final AppArtifact appArtifact = appDependency.getArtifact(); - if (!projectDependencies.contains(new AppArtifactKey(appArtifact.getGroupId(), appArtifact.getArtifactId()))) { - appArtifact.getPaths().forEach(p -> { - if (Files.exists(p)) { - addToClassPaths(classPathManifest, p.toFile()); - } - }); - } - } + } - //we also want to add the maven plugin jar to the class path - //this allows us to just directly use classes, without messing around copying them - //to the runner jar - addGradlePluginDeps(classPathManifest, context); + builder.sourceEncoding(getSourceEncoding()); - context.setCacheDir(new File(getBuildDir(), "transformer-cache").getAbsoluteFile()); + final AppModel appModel; + final AppModelResolver modelResolver = extension().getAppModelResolver(LaunchMode.DEVELOPMENT); + try { + final AppArtifact appArtifact = extension().getAppArtifact(); + appArtifact.setPaths(QuarkusGradleUtils.getOutputPaths(project)); + appModel = modelResolver.resolveModel(appArtifact); + } catch (AppModelResolverException e) { + throw new GradleException("Failed to resolve application model " + extension().getAppArtifact() + " dependencies", + e); + } - JavaPluginConvention javaPluginConvention = project.getConvention().findPlugin(JavaPluginConvention.class); - if (javaPluginConvention != null) { - context.setSourceJavaVersion(javaPluginConvention.getSourceCompatibility().toString()); - context.setTargetJvmVersion(javaPluginConvention.getTargetCompatibility().toString()); - } + final Set projectDependencies = new HashSet<>(); + addSelfWithLocalDeps(project, builder, new HashSet<>(), projectDependencies, true); - if (getCompilerArgs().isEmpty()) { - getJavaCompileTask() - .map(compileTask -> compileTask.getOptions().getCompilerArgs()) - .ifPresent(context::setCompilerOptions); - } else { - context.setCompilerOptions(getCompilerArgs()); + for (AppDependency appDependency : appModel.getFullDeploymentDeps()) { + final AppArtifact appArtifact = appDependency.getArtifact(); + if (!projectDependencies.contains(new AppArtifactKey(appArtifact.getGroupId(), appArtifact.getArtifactId()))) { + appArtifact.getPaths().forEach(p -> { + if (Files.exists(p)) { + addToClassPaths(builder, p.toFile()); + } + }); } + } - // this is the jar file we will use to launch the dev mode main class - final File tempFile = new File(getBuildDir(), extension.finalName() + "-dev.jar"); - tempFile.delete(); - tempFile.deleteOnExit(); - - context.setDevModeRunnerJarFile(tempFile); - modifyDevModeContext(context); - try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(tempFile))) { - out.putNextEntry(new ZipEntry("META-INF/")); - Manifest manifest = new Manifest(); - manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); - manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classPathManifest.toString()); - manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, DevModeMain.class.getName()); - out.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF")); - manifest.write(out); - - out.putNextEntry(new ZipEntry(DevModeMain.DEV_MODE_CONTEXT)); - ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - try (ObjectOutputStream obj = new ObjectOutputStream(new DataOutputStream(bytes))) { - obj.writeObject(context); - } - out.write(bytes.toByteArray()); + //we also want to add the Gradle plugin to the class path + //this allows us to just directly use classes, without 
messing around copying them + //to the runner jar + addGradlePluginDeps(builder); - out.putNextEntry(new ZipEntry(BootstrapConstants.SERIALIZED_APP_MODEL)); - bytes = new ByteArrayOutputStream(); - try (ObjectOutputStream obj = new ObjectOutputStream(new DataOutputStream(bytes))) { - obj.writeObject(appModel); - } - out.write(bytes.toByteArray()); - } + JavaPluginConvention javaPluginConvention = project.getConvention().findPlugin(JavaPluginConvention.class); + if (javaPluginConvention != null) { + builder.sourceJavaVersion(javaPluginConvention.getSourceCompatibility().toString()); + builder.targetJavaVersion(javaPluginConvention.getTargetCompatibility().toString()); + } - final Path serializedModel = QuarkusGradleUtils.serializeAppModel(appModel, this); - serializedModel.toFile().deleteOnExit(); - args.add("-D" + BootstrapConstants.SERIALIZED_APP_MODEL + "=" + serializedModel.toAbsolutePath()); + if (getCompilerArgs().isEmpty()) { + getJavaCompileTask() + .map(compileTask -> compileTask.getOptions().getCompilerArgs()) + .ifPresent(builder::compilerOptions); + } else { + builder.compilerOptions(getCompilerArgs()); + } - extension.outputDirectory().mkdirs(); + modifyDevModeContext(builder); - if (context.isEnablePreview()) { - args.add(DevModeContext.ENABLE_PREVIEW_FLAG); - } + final Path serializedModel = QuarkusGradleUtils.serializeAppModel(appModel, this); + serializedModel.toFile().deleteOnExit(); + builder.jvmArgs("-D" + BootstrapConstants.SERIALIZED_APP_MODEL + "=" + serializedModel.toAbsolutePath()); - args.add("-jar"); - args.add(tempFile.getAbsolutePath()); - args.addAll(this.getArgs()); - if (getLogger().isDebugEnabled()) { - getLogger().debug("Launching JVM with command line: {}", String.join(" ", args)); - } + extension().outputDirectory().mkdirs(); - project.exec(action -> { - action.commandLine(args).workingDir(getWorkingDir()); - action.setStandardInput(System.in) - .setErrorOutput(System.out) - .setStandardOutput(System.out); - }); - } catch (Exception e) { - throw new GradleException("Failed to run", e); + if (!args.isEmpty()) { + builder.applicationArgs(String.join(" ", args)); } + + return builder.build(); } - protected void modifyDevModeContext(DevModeContext devModeContext) { + protected void modifyDevModeContext(GradleDevModeLauncher.Builder builder) { } - private void addSelfWithLocalDeps(Project project, DevModeContext context, Set visited, + private void addSelfWithLocalDeps(Project project, GradleDevModeLauncher.Builder builder, Set visited, Set addedDeps, boolean root) { if (!visited.add(project.getPath())) { return; @@ -401,15 +292,16 @@ private void addSelfWithLocalDeps(Project project, DevModeContext context, Set { if (d instanceof ProjectDependency) { - addSelfWithLocalDeps(((ProjectDependency) d).getDependencyProject(), context, visited, addedDeps, false); + addSelfWithLocalDeps(((ProjectDependency) d).getDependencyProject(), builder, visited, addedDeps, false); } }); } - addLocalProject(project, context, addedDeps, root); + addLocalProject(project, builder, addedDeps, root); } - private void addLocalProject(Project project, DevModeContext context, Set addeDeps, boolean root) { + private void addLocalProject(Project project, GradleDevModeLauncher.Builder builder, Set addeDeps, + boolean root) { final AppArtifactKey key = new AppArtifactKey(project.getGroup().toString(), project.getName()); if (addeDeps.contains(key)) { return; @@ -420,7 +312,10 @@ private void addLocalProject(Project project, DevModeContext context, Set sourcePaths = new HashSet<>(); Set 
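Note that the protected modifyDevModeContext hook now receives the GradleDevModeLauncher.Builder instead of a DevModeContext, so subclasses customize the launcher before the dev-mode process is started. A hypothetical subclass, assuming QuarkusDev remains extendable with its default constructor; the jvmArgs(String) call mirrors the builder usage in newLauncher() above:

```java
// Hypothetical subclass, not part of the patch: customizations go through the builder now.
import io.quarkus.gradle.tasks.GradleDevModeLauncher;
import io.quarkus.gradle.tasks.QuarkusDev;

public class CustomQuarkusDev extends QuarkusDev {

    @Override
    protected void modifyDevModeContext(GradleDevModeLauncher.Builder builder) {
        builder.jvmArgs("-Dcustom.dev.flag=true"); // illustrative JVM argument
    }
}
```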
sourceParentPaths = new HashSet<>(); @@ -432,16 +327,18 @@ private void addLocalProject(Project project, DevModeContext context, Set getJavaCompileTask() { .ofNullable((JavaCompile) getProject().getTasks().getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME)); } - private void addGradlePluginDeps(StringBuilder classPathManifest, DevModeContext context) { + private ResolvedDependency findQuarkusPluginDependency(Set dependencies) { + for (ResolvedDependency rd : dependencies) { + if ("io.quarkus.gradle.plugin".equals(rd.getModuleName())) { + return rd; + } else { + Set children = rd.getChildren(); + if (children != null) { + ResolvedDependency quarkusPluginDependency = findQuarkusPluginDependency(children); + if (quarkusPluginDependency != null) { + return quarkusPluginDependency; + } + } + } + } + return null; + } + + private void addGradlePluginDeps(GradleDevModeLauncher.Builder builder) { boolean foundQuarkusPlugin = false; Project prj = getProject(); while (prj != null && !foundQuarkusPlugin) { @@ -491,14 +405,15 @@ private void addGradlePluginDeps(StringBuilder classPathManifest, DevModeContext if (firstLevelDeps.isEmpty()) { // TODO this looks weird } else { - for (ResolvedDependency rd : firstLevelDeps) { - if ("io.quarkus.gradle.plugin".equals(rd.getModuleName())) { - rd.getAllModuleArtifacts().stream() - .map(ResolvedArtifact::getFile) - .forEach(f -> addToClassPaths(classPathManifest, f)); - foundQuarkusPlugin = true; - break; - } + ResolvedDependency quarkusPluginDependency = findQuarkusPluginDependency(firstLevelDeps); + if (quarkusPluginDependency != null) { + quarkusPluginDependency.getAllModuleArtifacts().stream() + .map(ResolvedArtifact::getFile) + .forEach(f -> addToClassPaths(builder, f)); + + foundQuarkusPlugin = true; + + break; } } prj = prj.getParent(); @@ -521,7 +436,7 @@ private void addGradlePluginDeps(StringBuilder classPathManifest, DevModeContext for (String cpElement : classpath.split(File.pathSeparator)) { final File f = new File(cpElement); if (f.exists()) { - addToClassPaths(classPathManifest, f); + addToClassPaths(builder, f); } } } @@ -531,12 +446,10 @@ private void addGradlePluginDeps(StringBuilder classPathManifest, DevModeContext } } - private void addToClassPaths(StringBuilder classPathManifest, File file) { + private void addToClassPaths(GradleDevModeLauncher.Builder classPathManifest, File file) { if (filesIncludedInClasspath.add(file)) { getProject().getLogger().debug("Adding dependency {}", file); - - final URI uri = file.toPath().toAbsolutePath().toUri(); - classPathManifest.append(uri).append(" "); + classPathManifest.classpathEntry(file); } } } diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java new file mode 100644 index 0000000000000..17d2b72ea677d --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java @@ -0,0 +1,128 @@ +package io.quarkus.gradle.tasks; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import java.util.Set; +import java.util.function.Consumer; + +import org.gradle.api.GradleException; +import org.gradle.api.file.FileCollection; +import org.gradle.api.plugins.Convention; +import org.gradle.api.plugins.JavaPluginConvention; +import org.gradle.api.tasks.CompileClasspath; 
+import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.TaskAction; + +import io.quarkus.bootstrap.BootstrapException; +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.resolver.AppModelResolver; +import io.quarkus.deployment.CodeGenerator; + +public class QuarkusGenerateCode extends QuarkusTask { + + public static final String QUARKUS_GENERATED_SOURCES = "quarkus-generated-sources"; + public static final String QUARKUS_TEST_GENERATED_SOURCES = "quarkus-test-generated-sources"; + // TODO dynamically load generation providers, or make them write code directly in quarkus-generated-sources + public static final String[] CODE_GENERATION_PROVIDER = new String[] { "grpc" }; + + public static final String INIT_AND_RUN = "initAndRun"; + private Set sourcesDirectories; + private Consumer sourceRegistrar; + private boolean test = false; + + public QuarkusGenerateCode() { + super("Performs Quarkus pre-build preparations, such as sources generation"); + } + + /** + * Create a dependency on classpath resolution. This makes sure included builds are built before this task runs. + * + * @return resolved compile classpath + */ + @CompileClasspath + public FileCollection getClasspath() { + return QuarkusGradleUtils.getSourceSet(getProject(), SourceSet.MAIN_SOURCE_SET_NAME).getCompileClasspath(); + } + + @TaskAction + public void prepareQuarkus() { + getLogger().lifecycle("preparing quarkus application"); + + final AppArtifact appArtifact = extension().getAppArtifact(); + appArtifact.setPaths(QuarkusGradleUtils.getOutputPaths(getProject())); + + final AppModelResolver modelResolver = extension().getAppModelResolver(); + final Properties realProperties = getBuildSystemProperties(appArtifact); + + Path buildDir = getProject().getBuildDir().toPath(); + try (CuratedApplication appCreationContext = QuarkusBootstrap.builder() + .setBaseClassLoader(getClass().getClassLoader()) + .setAppModelResolver(modelResolver) + .setTargetDirectory(buildDir) + .setBaseName(extension().finalName()) + .setBuildSystemProperties(realProperties) + .setAppArtifact(appArtifact) + .setLocalProjectDiscovery(false) + .setIsolateDeployment(true) + .build().bootstrap()) { + + final Convention convention = getProject().getConvention(); + JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class); + if (javaConvention != null) { + final String generateSourcesDir = test ?
QUARKUS_TEST_GENERATED_SOURCES : QUARKUS_GENERATED_SOURCES; + final SourceSet generatedSources = javaConvention.getSourceSets().findByName(generateSourcesDir); + List paths = new ArrayList<>(); + generatedSources.getOutput() + .filter(f -> f.getName().equals(generateSourcesDir)) + .forEach(f -> paths.add(f.toPath())); + if (paths.isEmpty()) { + throw new GradleException("Failed to create quarkus-generated-sources"); + } + + getLogger().debug("Will trigger preparing sources for source directory: {} buildDir: {}", + sourcesDirectories, getProject().getBuildDir().getAbsolutePath()); + + QuarkusClassLoader deploymentClassLoader = appCreationContext.createDeploymentClassLoader(); + Class codeGenerator = deploymentClassLoader.loadClass(CodeGenerator.class.getName()); + + Optional initAndRun = Arrays.stream(codeGenerator.getMethods()) + .filter(m -> m.getName().equals(INIT_AND_RUN)) + .findAny(); + if (!initAndRun.isPresent()) { + throw new GradleException("Failed to find " + INIT_AND_RUN + " method in " + CodeGenerator.class.getName()); + } + initAndRun.get().invoke(null, deploymentClassLoader, + sourcesDirectories, + paths.iterator().next(), + buildDir, + sourceRegistrar, + appCreationContext.getAppModel(), + realProperties); + + } + } catch (BootstrapException | IllegalAccessException | InvocationTargetException | ClassNotFoundException e) { + throw new GradleException("Failed to generate sources in the QuarkusPrepare task", e); + } + } + + public void setSourcesDirectories(Set sourcesDirectories) { + this.sourcesDirectories = sourcesDirectories; + } + + public void setSourceRegistrar(Consumer sourceRegistrar) { + this.sourceRegistrar = sourceRegistrar; + } + + public void setTest(boolean test) { + this.test = test; + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListExtensions.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListExtensions.java index 2c9407c9dde80..428f0f4d932ae 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListExtensions.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListExtensions.java @@ -1,10 +1,5 @@ package io.quarkus.gradle.tasks; -import static java.util.stream.Collectors.toList; - -import java.net.URL; -import java.util.List; - import org.gradle.api.GradleException; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.Optional; @@ -12,18 +7,18 @@ import org.gradle.api.tasks.options.Option; import io.quarkus.devtools.commands.ListExtensions; -import io.quarkus.registry.DefaultExtensionRegistry; +import io.quarkus.devtools.project.QuarkusProject; public class QuarkusListExtensions extends QuarkusPlatformTask { private boolean all = true; + private boolean installed = false; + private boolean fromCli = false; private String format = "concise"; private String searchPattern; - private List registries; - @Input public boolean isAll() { return all; @@ -34,6 +29,26 @@ public void setAll(boolean all) { this.all = all; } + @Input + public boolean isFromCli() { + return fromCli; + } + + @Option(description = "List only installed extensions.", option = "fromCli") + public void setFromCli(boolean fromCli) { + this.fromCli = fromCli; + } + + @Input + public boolean isInstalled() { + return installed; + } + + @Option(description = "List only installed extensions.", option = "installed") + public void setInstalled(boolean installed) { + this.installed = installed; + } + @Optional @Input public String getFormat() { @@ -56,17 +71,6 @@ public void setSearchPattern(String 
searchPattern) { this.searchPattern = searchPattern; } - @Optional - @Input - public List getRegistries() { - return registries; - } - - @Option(description = "The extension registry URLs to be used", option = "registry") - public void setRegistries(List registry) { - this.registries = registry; - } - public QuarkusListExtensions() { super("Lists the available quarkus extensions"); } @@ -74,16 +78,13 @@ public QuarkusListExtensions() { @TaskAction public void listExtensions() { try { - ListExtensions listExtensions = new ListExtensions(getQuarkusProject()) - .all(isAll()) + final QuarkusProject quarkusProject = getQuarkusProject(installed); + ListExtensions listExtensions = new ListExtensions(quarkusProject) + .all(isFromCli() ? false : isAll()) + .fromCli(isFromCli()) .format(getFormat()) + .installed(isInstalled()) .search(getSearchPattern()); - if (registries != null && !registries.isEmpty()) { - List urls = registries.stream() - .map(QuarkusListExtensions::toURL) - .collect(toList()); - listExtensions.extensionRegistry(DefaultExtensionRegistry.fromURLs(urls)); - } listExtensions.execute(); } catch (Exception e) { throw new GradleException("Unable to list extensions", e); diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListPlatforms.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListPlatforms.java new file mode 100644 index 0000000000000..ae671f78c16c9 --- /dev/null +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusListPlatforms.java @@ -0,0 +1,52 @@ +package io.quarkus.gradle.tasks; + +import org.gradle.api.GradleException; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.options.Option; + +import io.quarkus.devtools.commands.ListPlatforms; +import io.quarkus.registry.Constants; + +public class QuarkusListPlatforms extends QuarkusPlatformTask { + + private boolean installed = false; + + public QuarkusListPlatforms() { + super("Lists the available quarkus platforms"); + } + + @Input + public boolean isInstalled() { + return installed; + } + + @Option(description = "List only installed platforms.", option = "installed") + public void setInstalled(boolean installed) { + this.installed = installed; + } + + @TaskAction + public void listExtensions() { + getProject().getLogger().info(""); + if (installed) { + getProject().getLogger().info("Imported Quarkus platforms:"); + importedPlatforms().forEach(coords -> { + final StringBuilder buf = new StringBuilder(); + buf.append(coords.getGroupId()).append(":") + .append(coords.getArtifactId().substring(0, + coords.getArtifactId().length() - Constants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX.length())) + .append("::pom:").append(coords.getVersion()); + messageWriter().info(buf.toString()); + }); + } else { + getProject().getLogger().info("Available Quarkus platforms:"); + try { + new ListPlatforms(getQuarkusProject(installed)).execute(); + } catch (Exception e) { + throw new GradleException("Unable to list platforms", e); + } + } + getProject().getLogger().info(""); + } +} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPlatformTask.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPlatformTask.java index f85ac2ebe0efd..9401798248ea2 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPlatformTask.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPlatformTask.java @@ -2,38 +2,115 @@ import java.net.MalformedURLException; import java.net.URL; 
+import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import org.gradle.api.GradleException; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ModuleDependency; import org.gradle.api.attributes.Category; import org.gradle.api.plugins.JavaPlugin; -import org.gradle.api.tasks.Internal; +import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.devtools.messagewriter.MessageWriter; import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.devtools.project.QuarkusProjectHelper; import io.quarkus.devtools.project.buildfile.BuildFile; -import io.quarkus.gradle.GradleBuildFileFromConnector; -import io.quarkus.platform.descriptor.CombinedQuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.resolver.json.QuarkusJsonPlatformDescriptorResolver; +import io.quarkus.devtools.project.buildfile.GradleGroovyProjectBuildFile; +import io.quarkus.devtools.project.buildfile.GradleKotlinProjectBuildFile; +import io.quarkus.maven.ArtifactCoords; +import io.quarkus.platform.tools.ToolsUtils; +import io.quarkus.registry.ExtensionCatalogResolver; +import io.quarkus.registry.catalog.ExtensionCatalog; public abstract class QuarkusPlatformTask extends QuarkusTask { + private static boolean isEnableRegistryClient() { + final String value = System.getProperty("enableRegistryClient"); + return value == null ? System.getProperties().containsKey("enableRegistryClient") : Boolean.parseBoolean(value); + } + QuarkusPlatformTask(String description) { super(description); } - protected QuarkusPlatformDescriptor platformDescriptor() { + private ExtensionCatalog extensionsCatalog(boolean limitExtensionsToImportedPlatforms, MessageWriter log) { + final List platforms = importedPlatforms(); + final ExtensionCatalogResolver catalogResolver = isEnableRegistryClient() ? QuarkusProjectHelper.getCatalogResolver(log) + : ExtensionCatalogResolver.empty(); + if (catalogResolver.hasRegistries()) { + try { + return limitExtensionsToImportedPlatforms ? 
catalogResolver.resolveExtensionCatalog(platforms) + : catalogResolver.resolveExtensionCatalog(quarkusCoreVersion()); + } catch (Exception e) { + throw new RuntimeException("Failed to resolve extensions catalog", e); + } + } + return ToolsUtils.mergePlatforms(platforms, extension().getAppModelResolver()); + } + + protected List importedPlatforms() { + final List bomDeps = boms(); + if (bomDeps.isEmpty()) { + throw new GradleException("No platforms detected in the project"); + } + + final Configuration boms = getProject().getConfigurations() + .detachedConfiguration(bomDeps.toArray(new org.gradle.api.artifacts.Dependency[0])); + final Set processedKeys = new HashSet<>(1); + + List platforms = new ArrayList<>(); + boms.getResolutionStrategy().eachDependency(d -> { + if (!d.getTarget().getName().endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX) + || !processedKeys.add(new AppArtifactKey(d.getTarget().getGroup(), d.getTarget().getName()))) { + return; + } + final ArtifactCoords platform = new ArtifactCoords(d.getTarget().getGroup(), d.getTarget().getName(), + d.getTarget().getVersion(), "json", d.getTarget().getVersion()); + platforms.add(platform); + }); + boms.getResolvedConfiguration(); + + if (platforms.isEmpty()) { + throw new RuntimeException("No Quarkus platforms found in the project"); + } + return platforms; + } + + protected String quarkusCoreVersion() { + final List bomDeps = boms(); + if (bomDeps.isEmpty()) { + throw new GradleException("No platforms detected in the project"); + } - final QuarkusJsonPlatformDescriptorResolver platformResolver = QuarkusJsonPlatformDescriptorResolver.newInstance() - .setArtifactResolver(extension().getAppModelResolver()) - .setMessageWriter(new GradleMessageWriter(getProject().getLogger())); + final Configuration boms = getProject().getConfigurations() + .detachedConfiguration(bomDeps.toArray(new org.gradle.api.artifacts.Dependency[0])); - List platforms = new ArrayList<>(2); + final AtomicReference quarkusVersionRef = new AtomicReference<>(); + boms.getResolutionStrategy().eachDependency(d -> { + if (quarkusVersionRef.get() == null && d.getTarget().getName().equals("quarkus-core") + && d.getTarget().getGroup().equals("io.quarkus")) { + quarkusVersionRef.set(d.getTarget().getVersion()); + } + }); + boms.getResolvedConfiguration(); + final String quarkusCoreVersion = quarkusVersionRef.get(); + if (quarkusCoreVersion == null) { + throw new IllegalStateException("Failed to determine the Quarkus core version for the project"); + } + return quarkusCoreVersion; + } + + private List boms() { final Configuration impl = getProject().getConfigurations().getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); + List boms = new ArrayList<>(); impl.getIncoming().getDependencies() .forEach(d -> { if (!(d instanceof ModuleDependency)) { @@ -41,44 +118,39 @@ protected QuarkusPlatformDescriptor platformDescriptor() { } final ModuleDependency module = (ModuleDependency) d; final Category category = module.getAttributes().getAttribute(Category.CATEGORY_ATTRIBUTE); - if (category == null || !Category.ENFORCED_PLATFORM.equals(category.getName())) { - return; - } - try { - platforms - .add(platformResolver.resolveFromBom(module.getGroup(), module.getName(), module.getVersion())); - } catch (Exception e) { - // not a platform + if (category != null + && (Category.ENFORCED_PLATFORM.equals(category.getName()) + || Category.REGULAR_PLATFORM.equals(category.getName()))) { + boms.add(d); } }); + return boms; + } - if (platforms.isEmpty()) { - throw new 
GradleException("Failed to determine Quarkus platform for the project"); - } - - if (platforms.size() == 1) { - return platforms.get(0); - } + protected QuarkusProject getQuarkusProject(boolean limitExtensionsToImportedPlatforms) { - final CombinedQuarkusPlatformDescriptor.Builder builder = CombinedQuarkusPlatformDescriptor.builder(); - for (QuarkusPlatformDescriptor platform : platforms) { - builder.addPlatform(platform); - } - return builder.build(); - } + final GradleMessageWriter log = messageWriter(); + final ExtensionCatalog catalog = extensionsCatalog(limitExtensionsToImportedPlatforms, log); - @Internal - protected BuildFile getGradleBuildFile() { final Path projectDirPath = getProject().getProjectDir().toPath(); final Path rootProjectPath = getProject().getParent() != null ? getProject().getRootProject().getProjectDir().toPath() - : null; - return new GradleBuildFileFromConnector(projectDirPath, platformDescriptor(), - rootProjectPath); + : projectDirPath; + final BuildFile buildFile; + if (Files.exists(rootProjectPath.resolve("settings.gradle.kts")) + && Files.exists(projectDirPath.resolve("build.gradle.kts"))) { + buildFile = new GradleKotlinProjectBuildFile(getProject(), catalog); + } else if (Files.exists(rootProjectPath.resolve("settings.gradle")) + && Files.exists(projectDirPath.resolve("build.gradle"))) { + buildFile = new GradleGroovyProjectBuildFile(getProject(), catalog); + } else { + throw new GradleException( + "Mixed DSL is not supported. Both build and settings file need to use either Kotlin or Groovy DSL"); + } + return QuarkusProjectHelper.getProject(getProject().getProjectDir().toPath(), catalog, buildFile, log); } - @Internal - protected QuarkusProject getQuarkusProject() { - return QuarkusProject.of(getProject().getProjectDir().toPath(), platformDescriptor(), getGradleBuildFile()); + protected GradleMessageWriter messageWriter() { + return new GradleMessageWriter(getProject().getLogger()); } protected static URL toURL(String url) { diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPrepare.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPrepare.java deleted file mode 100644 index 69b7d55be7149..0000000000000 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusPrepare.java +++ /dev/null @@ -1,111 +0,0 @@ -package io.quarkus.gradle.tasks; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.Properties; -import java.util.Set; -import java.util.function.Consumer; - -import org.gradle.api.GradleException; -import org.gradle.api.plugins.Convention; -import org.gradle.api.plugins.JavaPluginConvention; -import org.gradle.api.tasks.SourceSet; -import org.gradle.api.tasks.TaskAction; - -import io.quarkus.bootstrap.BootstrapException; -import io.quarkus.bootstrap.app.CuratedApplication; -import io.quarkus.bootstrap.app.QuarkusBootstrap; -import io.quarkus.bootstrap.classloading.QuarkusClassLoader; -import io.quarkus.bootstrap.model.AppArtifact; -import io.quarkus.bootstrap.resolver.AppModelResolver; -import io.quarkus.deployment.CodeGenerator; - -public class QuarkusPrepare extends QuarkusTask { - - public static final String INIT_AND_RUN = "initAndRun"; - private Set sourcesDirectories; - private Consumer sourceRegistrar; - private boolean test = false; - - public QuarkusPrepare() { - super("Performs Quarkus pre-build 
preparations, such as sources generation"); - } - - @TaskAction - public void prepareQuarkus() { - getLogger().lifecycle("preparing quarkus application"); - - final AppArtifact appArtifact = extension().getAppArtifact(); - appArtifact.setPaths(QuarkusGradleUtils.getOutputPaths(getProject())); - - final AppModelResolver modelResolver = extension().getAppModelResolver(); - final Properties realProperties = getBuildSystemProperties(appArtifact); - - Path buildDir = getProject().getBuildDir().toPath(); - try (CuratedApplication appCreationContext = QuarkusBootstrap.builder() - .setBaseClassLoader(getClass().getClassLoader()) - .setAppModelResolver(modelResolver) - .setTargetDirectory(buildDir) - .setBaseName(extension().finalName()) - .setBuildSystemProperties(realProperties) - .setAppArtifact(appArtifact) - .setLocalProjectDiscovery(false) - .setIsolateDeployment(true) - .build().bootstrap()) { - - final Convention convention = getProject().getConvention(); - JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class); - if (javaConvention != null) { - String generateSourcesDir = test ? "quarkus-test-generated-sources" : "quarkus-generated-sources"; - final SourceSet generatedSources = javaConvention.getSourceSets().create(generateSourcesDir); - generatedSources.getOutput().dir(generateSourcesDir); - List paths = new ArrayList<>(); - generatedSources.getOutput() - .filter(f -> f.getName().equals(generateSourcesDir)) - .forEach(f -> paths.add(f.toPath())); - if (paths.isEmpty()) { - throw new GradleException("Failed to create quarkus-generated-sources"); - } - - getLogger().debug("Will trigger preparing sources for source directory: {} buildDir: {}", - sourcesDirectories, getProject().getBuildDir().getAbsolutePath()); - - QuarkusClassLoader deploymentClassLoader = appCreationContext.createDeploymentClassLoader(); - Class codeGenerator = deploymentClassLoader.loadClass(CodeGenerator.class.getName()); - - Optional initAndRun = Arrays.stream(codeGenerator.getMethods()) - .filter(m -> m.getName().equals(INIT_AND_RUN)) - .findAny(); - if (!initAndRun.isPresent()) { - throw new GradleException("Failed to find " + INIT_AND_RUN + " method in " + CodeGenerator.class.getName()); - } - initAndRun.get().invoke(null, deploymentClassLoader, - sourcesDirectories, - paths.iterator().next(), - buildDir, - sourceRegistrar, - appCreationContext.getAppModel()); - - } - } catch (BootstrapException | IllegalAccessException | InvocationTargetException | ClassNotFoundException e) { - throw new GradleException("Failed to generate sources in the QuarkusPrepare task", e); - } - } - - public void setSourcesDirectories(Set sourcesDirectories) { - this.sourcesDirectories = sourcesDirectories; - } - - public void setSourceRegistrar(Consumer sourceRegistrar) { - this.sourceRegistrar = sourceRegistrar; - } - - public void setTest(boolean test) { - this.test = test; - } -} diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoteDev.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoteDev.java index bbd135e97ce15..57843b79e0ff3 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoteDev.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoteDev.java @@ -1,17 +1,12 @@ package io.quarkus.gradle.tasks; -import io.quarkus.bootstrap.app.QuarkusBootstrap; -import io.quarkus.deployment.dev.DevModeContext; -import io.quarkus.deployment.dev.IsolatedRemoteDevModeMain; - public class QuarkusRemoteDev extends QuarkusDev { 
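// Note on the pattern used by the QuarkusGenerateCode task added earlier in this diff (and by the
// removed QuarkusPrepare task above): CodeGenerator.initAndRun is looked up and invoked reflectively
// through the deployment class loader instead of being linked directly. A minimal, self-contained
// sketch of that "load class, find static method by name, invoke" pattern follows; Math.max stands
// in for CodeGenerator.initAndRun here and is not part of the Quarkus API.
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Optional;

class ReflectiveStaticCallSketch {
    public static void main(String[] args) throws Exception {
        ClassLoader loader = ReflectiveStaticCallSketch.class.getClassLoader(); // stand-in for the deployment class loader
        Class<?> target = loader.loadClass("java.lang.Math");                   // stand-in for CodeGenerator
        Optional<Method> method = Arrays.stream(target.getMethods())
                .filter(m -> m.getName().equals("max") && m.getParameterTypes()[0] == long.class)
                .findAny();
        if (!method.isPresent()) {
            throw new IllegalStateException("Failed to find max method on " + target.getName());
        }
        // the receiver is null because the target method is static
        Object result = method.get().invoke(null, 3L, 7L);
        System.out.println(result); // prints 7
    }
}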
public QuarkusRemoteDev() { super("Remote development mode: enables hot deployment on remote JVM with background compilation"); } - protected void modifyDevModeContext(DevModeContext devModeContext) { - devModeContext.setMode(QuarkusBootstrap.Mode.PROD); - devModeContext.setAlternateEntryPoint(IsolatedRemoteDevModeMain.class.getName()); + protected void modifyDevModeContext(GradleDevModeLauncher.Builder builder) { + builder.remoteDev(true); } } diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoveExtension.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoveExtension.java index 1adf369f5c415..0d11fa2f81c67 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoveExtension.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusRemoveExtension.java @@ -39,7 +39,7 @@ public void removeExtension() { .map(String::trim) .collect(toSet()); try { - new RemoveExtensions(getQuarkusProject()) + new RemoveExtensions(getQuarkusProject(true)) .extensions(extensionsSet) .execute(); } catch (Exception e) { diff --git a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java index d040a63ec5216..257ce171183a4 100644 --- a/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java +++ b/devtools/gradle/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java @@ -6,7 +6,7 @@ import org.gradle.api.DefaultTask; import io.quarkus.bootstrap.model.AppArtifact; -import io.quarkus.gradle.QuarkusPluginExtension; +import io.quarkus.gradle.extension.QuarkusPluginExtension; public abstract class QuarkusTask extends DefaultTask { diff --git a/devtools/gradle/src/test/java/io/quarkus/gradle/GradleBuildFileTest.java b/devtools/gradle/src/test/java/io/quarkus/gradle/GradleBuildFileTest.java deleted file mode 100644 index ea7457f191337..0000000000000 --- a/devtools/gradle/src/test/java/io/quarkus/gradle/GradleBuildFileTest.java +++ /dev/null @@ -1,65 +0,0 @@ -package io.quarkus.gradle; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; - -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import org.apache.maven.model.Dependency; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; - -class GradleBuildFileTest { - - private static GradleBuildFileFromConnector buildFile; - - @BeforeAll - public static void beforeAll() throws URISyntaxException { - URL url = GradleBuildFileTest.class.getClassLoader().getResource("gradle-project"); - URI uri = url.toURI(); - Path gradleProjectPath = Paths.get(uri); - final QuarkusPlatformDescriptor descriptor = Mockito.mock(QuarkusPlatformDescriptor.class); - buildFile = new GradleBuildFileFromConnector(gradleProjectPath, descriptor); - } - - @Test - void testGetDependencies() throws IOException { - List dependencies = buildFile.getDependencies(); - assertThat(dependencies).isNotEmpty(); - List depsString = new ArrayList<>(); - for (Iterator depIter = dependencies.iterator(); depIter.hasNext();) { - Dependency dependency = depIter.next(); - String managementKey = 
dependency.getManagementKey(); - if (dependency.getVersion() != null && !dependency.getVersion().isEmpty()) { - managementKey += ':' + dependency.getVersion(); - } - depsString.add(managementKey); - } - assertThat(depsString).contains("io.quarkus:quarkus-jsonp:jar:0.23.2", - "io.quarkus:quarkus-jsonb:jar:0.23.2", "io.quarkus:quarkus-resteasy:jar:0.23.2"); - } - - @Test - void testGetProperty() throws IOException { - assertNull(buildFile.getProperty("toto")); - assertEquals("0.23.2", buildFile.getProperty("quarkusVersion")); - } - - @Test - void testGetTestClosure() throws IOException { - assertNull(buildFile.getProperty("test")); - } - -} diff --git a/devtools/gradle/src/test/java/io/quarkus/gradle/QuarkusPluginTest.java b/devtools/gradle/src/test/java/io/quarkus/gradle/QuarkusPluginTest.java index 337df0ded4573..6c6800cb411e8 100644 --- a/devtools/gradle/src/test/java/io/quarkus/gradle/QuarkusPluginTest.java +++ b/devtools/gradle/src/test/java/io/quarkus/gradle/QuarkusPluginTest.java @@ -4,6 +4,9 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import java.io.File; +import java.util.Set; + import org.gradle.api.Project; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.JavaPlugin; @@ -11,6 +14,8 @@ import org.gradle.testfixtures.ProjectBuilder; import org.junit.jupiter.api.Test; +import io.quarkus.gradle.extension.QuarkusPluginExtension; + public class QuarkusPluginTest { @Test @@ -53,4 +58,22 @@ public void shouldMakeQuarkusDevAndQuarkusBuildDependOnClassesTask() { assertThat(tasks.getByName(QuarkusPlugin.QUARKUS_DEV_TASK_NAME).getDependsOn()) .contains(tasks.getByName(JavaPlugin.CLASSES_TASK_NAME)); } + + @Test + public void shouldReturnMutlipleOutputSourceDirectories() { + Project project = ProjectBuilder.builder().build(); + project.getPluginManager().apply(QuarkusPlugin.ID); + project.getPluginManager().apply("java"); + project.getPluginManager().apply("scala"); + + final QuarkusPluginExtension extension = project.getExtensions().getByType(QuarkusPluginExtension.class); + + final Set outputSourceDirs = extension.combinedOutputSourceDirs(); + assertThat(outputSourceDirs).hasSize(4); + assertThat(outputSourceDirs).contains(new File(project.getBuildDir(), "classes/java/main"), + new File(project.getBuildDir(), "classes/java/test"), + new File(project.getBuildDir(), "classes/scala/main"), + new File(project.getBuildDir(), "classes/scala/test")); + + } } diff --git a/devtools/gradle/src/test/resources/gradle-project/build.gradle b/devtools/gradle/src/test/resources/gradle-project/build.gradle deleted file mode 100644 index 2e6db65bd7396..0000000000000 --- a/devtools/gradle/src/test/resources/gradle-project/build.gradle +++ /dev/null @@ -1,33 +0,0 @@ - - -plugins { - id 'java' -} - -repositories { - mavenLocal() - mavenCentral() -} - -dependencies { - implementation 'io.quarkus:quarkus-jsonp' - implementation 'io.quarkus:quarkus-jsonb' - constraints { - implementation("io.quarkus:quarkus-jsonb:0.10.0") { - because("to test constraints") - } - } - implementation enforcedPlatform("io.quarkus:quarkus-bom:${quarkusVersion}") - implementation 'io.quarkus:quarkus-resteasy' - - testImplementation 'io.quarkus:quarkus-junit5' - testImplementation 'io.rest-assured:rest-assured' -} - -test { - dependsOn 'cleanTest' - useJUnitPlatform() - - // @NativeImageTest and JVM mode tests can't be mixed in the same run - forkEvery 1 -} diff --git a/devtools/gradle/src/test/resources/gradle-project/gradle.properties 
b/devtools/gradle/src/test/resources/gradle-project/gradle.properties deleted file mode 100644 index cc038fb4df04c..0000000000000 --- a/devtools/gradle/src/test/resources/gradle-project/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -quarkusVersion = 0.23.2 \ No newline at end of file diff --git a/devtools/gradle/src/test/resources/multi-module-project/build.gradle b/devtools/gradle/src/test/resources/multi-module-project/build.gradle deleted file mode 100644 index c154c33071814..0000000000000 --- a/devtools/gradle/src/test/resources/multi-module-project/build.gradle +++ /dev/null @@ -1,19 +0,0 @@ - -subprojects { - apply plugin: 'java' - - repositories { - mavenLocal() - mavenCentral() - } - - dependencies { - implementation enforcedPlatform("io.quarkus:quarkus-bom:999-SNAPSHOT") - implementation 'io.quarkus:quarkus-resteasy' - testImplementation 'io.quarkus:quarkus-junit5' - } - -} - -group 'org.acme' -version '1.0-SNAPSHOT' diff --git a/devtools/gradle/src/test/resources/multi-module-project/settings.gradle b/devtools/gradle/src/test/resources/multi-module-project/settings.gradle deleted file mode 100644 index 60c19d80c56b0..0000000000000 --- a/devtools/gradle/src/test/resources/multi-module-project/settings.gradle +++ /dev/null @@ -1,13 +0,0 @@ -pluginManagement { - repositories { - mavenLocal() - mavenCentral() - gradlePluginPortal() - } - plugins { - id 'io.quarkus' version "${quarkusPluginVersion}" - } -} -rootProject.name='mutli-module-project' - -include 'common', 'application' \ No newline at end of file diff --git a/devtools/gradle/src/test/resources/simple-module-project/build.gradle b/devtools/gradle/src/test/resources/simple-module-project/build.gradle deleted file mode 100644 index 3aed0a675a9b6..0000000000000 --- a/devtools/gradle/src/test/resources/simple-module-project/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -plugins { - id 'java' - id 'io.quarkus' -} - -repositories { - mavenLocal() - mavenCentral() -} - -dependencies { - implementation enforcedPlatform("io.quarkus:quarkus-bom:999-SNAPSHOT") - implementation 'io.quarkus:quarkus-resteasy' -} - -group 'org.acme' -version '1.0-SNAPSHOT' - - -compileTestJava { - options.encoding = 'UTF-8' -} diff --git a/devtools/gradle/src/test/resources/simple-module-project/settings.gradle b/devtools/gradle/src/test/resources/simple-module-project/settings.gradle deleted file mode 100644 index 775892b4374e6..0000000000000 --- a/devtools/gradle/src/test/resources/simple-module-project/settings.gradle +++ /dev/null @@ -1,11 +0,0 @@ -pluginManagement { - repositories { - mavenLocal() - mavenCentral() - gradlePluginPortal() - } - plugins { - id 'io.quarkus' version "${quarkusPluginVersion}" - } -} -rootProject.name='simple-module-project' \ No newline at end of file diff --git a/devtools/maven/pom.xml b/devtools/maven/pom.xml index 6b20bff6c6dcf..cc5d536ed0996 100644 --- a/devtools/maven/pom.xml +++ b/devtools/maven/pom.xml @@ -38,7 +38,7 @@ io.quarkus - quarkus-platform-descriptor-resolver-json + quarkus-devtools-common org.apache.maven @@ -116,12 +116,6 @@ - - - jline - jline - - org.freemarker freemarker @@ -142,18 +136,12 @@ org.twdata.maven mojo-executor - 2.3.0 - - - org.slf4j - slf4j-simple - - + 2.3.1 org.jboss.slf4j - slf4j-jboss-logging + slf4j-jboss-logmanager diff --git a/devtools/maven/src/main/java/io/quarkus/maven/AddExtensionMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/AddExtensionMojo.java index 49cf36a28e9f6..1983653ef206d 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/AddExtensionMojo.java +++ 
b/devtools/maven/src/main/java/io/quarkus/maven/AddExtensionMojo.java @@ -2,10 +2,8 @@ import static java.util.stream.Collectors.toSet; -import java.net.URL; import java.util.Arrays; import java.util.HashSet; -import java.util.List; import java.util.Set; import org.apache.commons.lang3.StringUtils; @@ -15,9 +13,8 @@ import io.quarkus.devtools.commands.AddExtensions; import io.quarkus.devtools.commands.data.QuarkusCommandOutcome; +import io.quarkus.devtools.messagewriter.MessageWriter; import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.tools.MessageWriter; -import io.quarkus.registry.DefaultExtensionRegistry; /** * Allow adding an extension to an existing pom.xml file. @@ -40,12 +37,6 @@ public class AddExtensionMojo extends QuarkusProjectMojoBase { @Parameter(property = "extension") String extension; - /** - * The URL where the registry is. - */ - @Parameter(property = "registry", alias = "quarkus.extension.registry") - List registries; - @Override protected void validateParameters() throws MojoExecutionException { if ((StringUtils.isBlank(extension) && (extensions == null || extensions.isEmpty())) // None are set @@ -69,9 +60,6 @@ public void doExecute(final QuarkusProject quarkusProject, final MessageWriter l try { AddExtensions addExtensions = new AddExtensions(quarkusProject) .extensions(ext.stream().map(String::trim).collect(toSet())); - if (registries != null && !registries.isEmpty()) { - addExtensions.extensionRegistry(DefaultExtensionRegistry.fromURLs(registries)); - } final QuarkusCommandOutcome outcome = addExtensions.execute(); if (!outcome.isSuccess()) { throw new MojoExecutionException("Unable to add extensions"); diff --git a/devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java index 6d00bd0193e19..2d0411859a406 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java @@ -6,12 +6,12 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.List; -import java.util.Properties; +import java.util.Optional; +import java.util.stream.Stream; import org.apache.maven.artifact.Artifact; -import org.apache.maven.model.Resource; -import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.logging.Log; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; @@ -19,214 +19,110 @@ import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; -import org.eclipse.aether.RepositorySystem; -import org.eclipse.aether.RepositorySystemSession; import org.eclipse.aether.repository.RemoteRepository; import io.quarkus.bootstrap.app.AugmentAction; import io.quarkus.bootstrap.app.AugmentResult; import io.quarkus.bootstrap.app.CuratedApplication; -import io.quarkus.bootstrap.app.QuarkusBootstrap; -import io.quarkus.bootstrap.model.AppArtifact; -import io.quarkus.bootstrap.model.AppArtifactKey; -import io.quarkus.bootstrap.model.PathsCollection; -import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; +import io.quarkus.bootstrap.util.IoUtils; +import io.quarkus.runtime.configuration.ProfileManager; /** - * Build the application. - *

    - * You can build a native application runner with {@code native-image} - * - * @author Alexey Loubyansky + * Builds the Quarkus application. */ @Mojo(name = "build", defaultPhase = LifecyclePhase.PACKAGE, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, threadSafe = true) -public class BuildMojo extends AbstractMojo { +public class BuildMojo extends QuarkusBootstrapMojo { - protected static final String QUARKUS_PACKAGE_UBER_JAR = "quarkus.package.uber-jar"; - /** - * The entry point to Aether, i.e. the component doing all the work. - * - * @component - */ - @Component - private RepositorySystem repoSystem; + private static final String PACKAGE_TYPE_PROP = "quarkus.package.type"; + private static final String NATIVE_PROFILE_NAME = "native"; + private static final String NATIVE_PACKAGE_TYPE = "native"; @Component private MavenProjectHelper projectHelper; - /** - * The current repository/network configuration of Maven. - * - * @parameter default-value="${repositorySystemSession}" - * @readonly - */ - @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) - private RepositorySystemSession repoSession; - - /** - * The project's remote repositories to use for the resolution of artifacts and their dependencies. - * - * @parameter default-value="${project.remoteProjectRepositories}" - * @readonly - */ - @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) - private List repos; - /** * The project's remote repositories to use for the resolution of plugins and their dependencies. - * - * @parameter default-value="${project.remotePluginRepositories}" - * @readonly */ @Parameter(defaultValue = "${project.remotePluginRepositories}", readonly = true, required = true) private List pluginRepos; - /** - * The directory for compiled classes. - */ - @Parameter(readonly = true, required = true, defaultValue = "${project.build.outputDirectory}") - @Deprecated - private File outputDirectory; - - @Parameter(defaultValue = "${project}", readonly = true, required = true) - protected MavenProject project; - /** * The directory for generated source files. */ @Parameter(defaultValue = "${project.build.directory}/generated-sources") private File generatedSourcesDirectory; - @Parameter(defaultValue = "${project.build.directory}") - private File buildDir; - - @Parameter(defaultValue = "${project.build.finalName}") - private String finalName; - - @Parameter(property = "uberJar", defaultValue = "false") - private boolean uberJar; - /** - * When using the uberJar option, this array specifies entries that should - * be excluded from the final jar. The entries are relative to the root of - * the file. An example of this configuration could be: - *

    -     * <pre>
    -     * <configuration>
    -     *   <uberJar>true</uberJar>
    -     *   <ignoredEntries>
    -     *     <ignoredEntry>META-INF/BC2048KE.SF</ignoredEntry>
    -     *     <ignoredEntry>META-INF/BC2048KE.DSA</ignoredEntry>
    -     *     <ignoredEntry>META-INF/BC1024KE.SF</ignoredEntry>
    -     *     <ignoredEntry>META-INF/BC1024KE.DSA</ignoredEntry>
    -     *   </ignoredEntries>
    -     * </configuration>
    -     * </pre>
    + * Skips the execution of this mojo */ - @Parameter(property = "ignoredEntries") - private String[] ignoredEntries; - - /** Skip the execution of this mojo */ @Parameter(defaultValue = "false", property = "quarkus.build.skip") private boolean skip = false; - public BuildMojo() { - MojoLogger.logSupplier = this::getLog; - } + @Deprecated + @Parameter(property = "skipOriginalJarRename") + boolean skipOriginalJarRename; @Override - public void execute() throws MojoExecutionException { - + protected boolean beforeExecute() throws MojoExecutionException { if (skip) { getLog().info("Skipping Quarkus build"); - return; + return false; } - if (project.getPackaging().equals("pom")) { + if (mavenProject().getPackaging().equals("pom")) { getLog().info("Type of the artifact is POM, skipping build goal"); - return; + return false; } - if (!project.getArtifact().getArtifactHandler().getExtension().equals("jar")) { + if (!mavenProject().getArtifact().getArtifactHandler().getExtension().equals("jar")) { throw new MojoExecutionException( - "The project artifact's extension is '" + project.getArtifact().getArtifactHandler().getExtension() + "The project artifact's extension is '" + mavenProject().getArtifact().getArtifactHandler().getExtension() + "' while this goal expects it be 'jar'"); } + return true; + } - boolean clear = false; - try { - - final Properties projectProperties = project.getProperties(); - final Properties effectiveProperties = new Properties(); - // quarkus. properties > ignoredEntries in pom.xml - if (ignoredEntries != null && ignoredEntries.length > 0) { - String joinedEntries = String.join(",", ignoredEntries); - effectiveProperties.setProperty("quarkus.package.user-configured-ignored-entries", joinedEntries); - } - for (String name : projectProperties.stringPropertyNames()) { - if (name.startsWith("quarkus.")) { - effectiveProperties.setProperty(name, projectProperties.getProperty(name)); - } - } - if (uberJar && System.getProperty(QUARKUS_PACKAGE_UBER_JAR) == null) { - System.setProperty(QUARKUS_PACKAGE_UBER_JAR, "true"); - clear = true; - } - effectiveProperties.putIfAbsent("quarkus.application.name", project.getArtifactId()); - effectiveProperties.putIfAbsent("quarkus.application.version", project.getVersion()); - - MavenArtifactResolver resolver = MavenArtifactResolver.builder() - .setWorkspaceDiscovery(false) - .setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos) - .build(); - - final Artifact projectArtifact = project.getArtifact(); - final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(), - projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(), - projectArtifact.getVersion()); + @Override + protected void doExecute() throws MojoExecutionException { - File projectFile = projectArtifact.getFile(); - if (projectFile == null) { - projectFile = new File(project.getBuild().getOutputDirectory()); - if (!projectFile.exists()) { - if (hasSources(project)) { - throw new MojoExecutionException("Project " + project.getArtifact() + " has not been compiled yet"); - } - if (!projectFile.mkdirs()) { - throw new MojoExecutionException("Failed to create the output dir " + projectFile); - } + try { + boolean clearPackageTypeSysProp = false; + // Essentially what this does is to enable the native package type even if a different package type is set + // in application properties. 
This is done to preserve what users expect to happen when + // they execute "mvn package -Dnative" even if quarkus.package.type has been set in application.properties + if (!System.getProperties().containsKey(PACKAGE_TYPE_PROP) + && isNativeProfileEnabled(mavenProject())) { + Object packageTypeProp = mavenProject().getProperties().get(PACKAGE_TYPE_PROP); + String packageType = NATIVE_PACKAGE_TYPE; + if (packageTypeProp != null) { + packageType = packageTypeProp.toString(); } + System.setProperty(PACKAGE_TYPE_PROP, packageType); + clearPackageTypeSysProp = true; } - appArtifact.setPaths(PathsCollection.of(projectFile.toPath())); - - QuarkusBootstrap.Builder builder = QuarkusBootstrap.builder() - .setAppArtifact(appArtifact) - .setMavenArtifactResolver(resolver) - .setBaseClassLoader(BuildMojo.class.getClassLoader()) - .setBuildSystemProperties(effectiveProperties) - .setLocalProjectDiscovery(false) - .setProjectRoot(project.getBasedir().toPath()) - .setBaseName(finalName) - .setTargetDirectory(buildDir.toPath()); - - for (MavenProject project : project.getCollectedProjects()) { - builder.addLocalArtifact(new AppArtifactKey(project.getGroupId(), project.getArtifactId(), null, - project.getArtifact().getArtifactHandler().getExtension())); + // Set the build profile based on the value of the project property ProfileManager.QUARKUS_PROFILE_PROP + // if and only if it has not already been set using the corresponding system property + // or environment variable. + final Object profile = mavenProject().getProperties().get(ProfileManager.QUARKUS_PROFILE_PROP); + if (profile != null && System.getProperty(ProfileManager.QUARKUS_PROFILE_PROP) == null + && System.getenv(ProfileManager.QUARKUS_PROFILE_ENV) == null) { + System.setProperty(ProfileManager.QUARKUS_PROFILE_PROP, profile.toString()); } - - try (CuratedApplication curatedApplication = builder - .build().bootstrap()) { + try (CuratedApplication curatedApplication = bootstrapApplication()) { AugmentAction action = curatedApplication.createAugmentor(); AugmentResult result = action.createProductionApplication(); - Artifact original = project.getArtifact(); + Artifact original = mavenProject().getArtifact(); if (result.getJar() != null) { - if (result.getJar().isUberJar() && result.getJar().getOriginalArtifact() != null) { - final Path standardJar = curatedApplication.getAppModel().getAppArtifact().getPaths().getSinglePath(); + + if (!skipOriginalJarRename && result.getJar().isUberJar() + && result.getJar().getOriginalArtifact() != null) { + final Path standardJar = result.getJar().getOriginalArtifact(); if (Files.exists(standardJar)) { + final Path renamedOriginal = standardJar.getParent().toAbsolutePath() + .resolve(standardJar.getFileName() + ".original"); try { - Files.deleteIfExists(result.getJar().getOriginalArtifact()); - Files.move(standardJar, result.getJar().getOriginalArtifact()); + IoUtils.recursiveDelete(renamedOriginal); + Files.move(standardJar, renamedOriginal); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -234,28 +130,35 @@ public void execute() throws MojoExecutionException { } } if (result.getJar().isUberJar()) { - projectHelper.attachArtifact(project, result.getJar().getPath().toFile(), "runner"); + projectHelper.attachArtifact(mavenProject(), result.getJar().getPath().toFile(), + result.getJar().getClassifier()); } } + } finally { + if (clearPackageTypeSysProp) { + System.clearProperty(PACKAGE_TYPE_PROP); + } } } catch (Exception e) { throw new MojoExecutionException("Failed to build quarkus application", 
e); - } finally { - if (clear) { - System.clearProperty(QUARKUS_PACKAGE_UBER_JAR); - } } } - private static boolean hasSources(MavenProject project) { - if (new File(project.getBuild().getSourceDirectory()).exists()) { + private boolean isNativeProfileEnabled(MavenProject mavenProject) { + // gotcha: mavenProject.getActiveProfiles() does not always contain all active profiles (sic!), + // but getInjectedProfileIds() does (which has to be "flattened" first) + Stream activeProfileIds = mavenProject.getInjectedProfileIds().values().stream().flatMap(List::stream); + if (activeProfileIds.anyMatch(NATIVE_PROFILE_NAME::equalsIgnoreCase)) { return true; } - for (Resource r : project.getBuild().getResources()) { - if (new File(r.getDirectory()).exists()) { - return true; - } - } - return false; + // recurse into parent (if available) + return Optional.ofNullable(mavenProject.getParent()).map(this::isNativeProfileEnabled).orElse(false); } + + @Override + public void setLog(Log log) { + super.setLog(log); + MojoLogger.delegate = log; + } + } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CodeGenMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CodeGenMojo.java deleted file mode 100644 index dc812e5b6131e..0000000000000 --- a/devtools/maven/src/main/java/io/quarkus/maven/CodeGenMojo.java +++ /dev/null @@ -1,152 +0,0 @@ -package io.quarkus.maven; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.List; -import java.util.Properties; -import java.util.function.Consumer; - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.plugin.AbstractMojo; -import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.MojoFailureException; -import org.apache.maven.plugins.annotations.Component; -import org.apache.maven.plugins.annotations.LifecyclePhase; -import org.apache.maven.plugins.annotations.Mojo; -import org.apache.maven.plugins.annotations.Parameter; -import org.apache.maven.plugins.annotations.ResolutionScope; -import org.apache.maven.project.MavenProject; -import org.eclipse.aether.RepositorySystem; -import org.eclipse.aether.RepositorySystemSession; -import org.eclipse.aether.repository.RemoteRepository; - -import io.quarkus.bootstrap.app.CuratedApplication; -import io.quarkus.bootstrap.app.QuarkusBootstrap; -import io.quarkus.bootstrap.classloading.QuarkusClassLoader; -import io.quarkus.bootstrap.model.AppArtifact; -import io.quarkus.bootstrap.prebuild.CodeGenFailureException; -import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; -import io.quarkus.deployment.CodeGenerator; -import io.quarkus.deployment.codegen.CodeGenData; - -@Mojo(name = "prepare", defaultPhase = LifecyclePhase.GENERATE_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, threadSafe = true) -public class CodeGenMojo extends AbstractMojo { - - @Parameter(defaultValue = "${project.build.directory}") - private File buildDir; - - @Parameter(defaultValue = "${project}", readonly = true, required = true) - protected MavenProject project; - - @Component - private RepositorySystem repoSystem; - - @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) - private RepositorySystemSession repoSession; - - @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) - private List repos; - - /** Skip the execution of this mojo */ - @Parameter(defaultValue = 
"false", property = "quarkus.prepare.skip") - private boolean skipSourceGeneration = false; - - @Override - public void execute() throws MojoExecutionException, MojoFailureException { - String projectDir = project.getBasedir().getAbsolutePath(); - Path sourcesDir = Paths.get(projectDir, "src", "main"); - doExecute(sourcesDir, path -> project.addCompileSourceRoot(path.toString()), false); - } - - void doExecute(Path sourcesDir, - Consumer sourceRegistrar, - boolean test) throws MojoFailureException, MojoExecutionException { - if (project.getPackaging().equals("pom")) { - getLog().info("Type of the artifact is POM, skipping build goal"); - return; - } - if (skipSourceGeneration) { - getLog().info("Skipping quarkus:" + (test ? "prepare-tests" : "prepare") + " (Quarkus code generation)"); - return; - } - - try { - - final Properties projectProperties = project.getProperties(); - final Properties realProperties = new Properties(); - for (String name : projectProperties.stringPropertyNames()) { - if (name.startsWith("quarkus.")) { - realProperties.setProperty(name, projectProperties.getProperty(name)); - } - } - - MavenArtifactResolver resolver = MavenArtifactResolver.builder() - .setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos) - .build(); - - final AppArtifact appArtifact = getAppArtifact(); - - CuratedApplication curatedApplication = QuarkusBootstrap.builder() - .setAppArtifact(appArtifact) - .setProjectRoot(project.getBasedir().toPath()) - .setMavenArtifactResolver(resolver) - .setBaseClassLoader(BuildMojo.class.getClassLoader()) - .setBuildSystemProperties(realProperties) - .setLocalProjectDiscovery(false) - .setTargetDirectory(buildDir.toPath()) - .build().bootstrap(); - - Path generatedSourcesDir = test - ? 
buildDir.toPath().resolve("generated-test-sources") - : buildDir.toPath().resolve("generated-sources"); - - sourceRegistrar.accept(generatedSourcesDir); - - QuarkusClassLoader deploymentClassLoader = curatedApplication.createDeploymentClassLoader(); - List codeGens = CodeGenerator.init(deploymentClassLoader, - Collections.singleton(sourcesDir), - generatedSourcesDir, - buildDir.toPath(), - sourceRegistrar); - - for (CodeGenData codeGen : codeGens) { - CodeGenerator.trigger( - deploymentClassLoader, - codeGen, - curatedApplication.getAppModel()); - } - } catch (CodeGenFailureException any) { - throw new MojoFailureException("Prepare phase of the quarkus-maven-plugin failed: " + any.getMessage(), any); - } catch (Exception any) { - throw new MojoExecutionException("Prepare phase of the quarkus-maven-plugin threw an error: " + any.getMessage(), - any); - } - } - - private AppArtifact getAppArtifact() throws MojoExecutionException { - final Path classesDir = Paths.get(project.getBuild().getOutputDirectory()); - if (!Files.exists(classesDir)) { - if (getLog().isDebugEnabled()) { - getLog().debug("Creating empty " + classesDir + " just to be able to resolve the project's artifact"); - } - try { - Files.createDirectories(classesDir); - } catch (IOException e) { - throw new MojoExecutionException("Failed to create " + classesDir); - } - } - - final Artifact projectArtifact = project.getArtifact(); - final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(), - projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(), - projectArtifact.getVersion()); - appArtifact.setPath(classesDir); - return appArtifact; - } -} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CodeGenTestMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CodeGenTestMojo.java deleted file mode 100644 index ee8faddec0a49..0000000000000 --- a/devtools/maven/src/main/java/io/quarkus/maven/CodeGenTestMojo.java +++ /dev/null @@ -1,20 +0,0 @@ -package io.quarkus.maven; - -import java.nio.file.Path; -import java.nio.file.Paths; - -import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.MojoFailureException; -import org.apache.maven.plugins.annotations.LifecyclePhase; -import org.apache.maven.plugins.annotations.Mojo; -import org.apache.maven.plugins.annotations.ResolutionScope; - -@Mojo(name = "prepare-tests", defaultPhase = LifecyclePhase.GENERATE_TEST_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME) -public class CodeGenTestMojo extends CodeGenMojo { - @Override - public void execute() throws MojoExecutionException, MojoFailureException { - String projectDir = project.getBasedir().getAbsolutePath(); - Path testSources = Paths.get(projectDir, "src", "test"); - doExecute(testSources, path -> project.addTestCompileSourceRoot(path.toString()), true); - } -} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionLegacyMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionLegacyMojo.java new file mode 100644 index 0000000000000..f2279d13faa52 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionLegacyMojo.java @@ -0,0 +1,1289 @@ +package io.quarkus.maven; + +import java.io.File; +import java.io.IOException; +import java.io.Reader; +import java.io.Writer; +import java.lang.reflect.Field; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import 
java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Stack; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import javax.lang.model.SourceVersion; + +import org.apache.maven.model.Model; +import org.apache.maven.model.io.xpp3.MavenXpp3Reader; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.Component; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; +import org.apache.maven.project.MavenProject; +import org.codehaus.plexus.util.xml.pull.XmlPullParserException; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.artifact.DefaultArtifact; +import org.eclipse.aether.graph.Dependency; +import org.eclipse.aether.repository.RemoteRepository; +import org.eclipse.aether.resolution.ArtifactDescriptorResult; +import org.jboss.logging.Logger; + +import freemarker.cache.ClassTemplateLoader; +import freemarker.cache.FileTemplateLoader; +import freemarker.cache.MultiTemplateLoader; +import freemarker.cache.TemplateLoader; +import freemarker.template.Configuration; +import freemarker.template.Template; +import freemarker.template.TemplateException; +import freemarker.template.TemplateExceptionHandler; +import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; +import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; +import io.quarkus.maven.utilities.MojoUtils; +import io.quarkus.maven.utilities.PomTransformer; +import io.quarkus.maven.utilities.PomTransformer.Gavtcs; +import io.quarkus.maven.utilities.PomTransformer.Transformation; + +/** + * LEGACY COMMAND + * + * Creates a triple of stub Maven modules (Parent, Runtime and Deployment) to implement a new + * Quarkus Extension. + * + *

    + * <h2>Adding into an established source tree</h2>
    + * <p>
    + * If this Mojo is executed in a directory that contains a {@code pom.xml} file with packaging {@code pom} the newly + * created Parent module is added as a child module to the existing {@code pom.xml} file. + * + *

    + * <h2>Creating a source tree from scratch</h2>
    + * <p>
    + * Executing this Mojo in an empty directory is not supported yet. + */ +@Mojo(name = "create-extension-legacy", requiresProject = false) +public class CreateExtensionLegacyMojo extends AbstractMojo { + + private static final String QUOTED_DOLLAR = Matcher.quoteReplacement("$"); + + private static final Logger log = Logger.getLogger(CreateExtensionLegacyMojo.class); + + private static final Pattern BRACKETS_PATTERN = Pattern.compile("[()]+"); + private static final String CLASSPATH_PREFIX = "classpath:"; + private static final String FILE_PREFIX = "file:"; + + private static final String QUARKUS_VERSION_PROP = "quarkus.version"; + + static final String DEFAULT_ENCODING = "utf-8"; + static final String DEFAULT_QUARKUS_VERSION = "@{" + QUARKUS_VERSION_PROP + "}"; + static final String QUARKUS_VERSION_POM_EXPR = "${" + QUARKUS_VERSION_PROP + "}"; + static final String DEFAULT_BOM_ENTRY_VERSION = "@{project.version}"; + static final String DEFAULT_TEMPLATES_URI_BASE = "classpath:/create-extension-templates"; + static final String DEFAULT_NAME_SEGMENT_DELIMITER = " - "; + static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("@\\{([^\\}]+)\\}"); + + static final String PLATFORM_DEFAULT_GROUP_ID = "io.quarkus"; + static final String PLATFORM_DEFAULT_ARTIFACT_ID = "quarkus-bom"; + + static final String COMPILER_PLUGIN_VERSION_PROP = "compiler-plugin.version"; + static final String COMPILER_PLUGIN_VERSION_POM_EXPR = "${" + COMPILER_PLUGIN_VERSION_PROP + "}"; + static final String COMPILER_PLUGIN_DEFAULT_VERSION = "3.8.1"; + private static final String COMPILER_PLUGIN_GROUP_ID = "org.apache.maven.plugins"; + private static final String COMPILER_PLUGIN_ARTIFACT_ID = "maven-compiler-plugin"; + private static final String COMPILER_PLUGIN_KEY = COMPILER_PLUGIN_GROUP_ID + ":" + + COMPILER_PLUGIN_ARTIFACT_ID; + + /** + * Directory where the changes should be performed. Default is the current directory of the current Java process. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.basedir") + File basedir; + + /** + * The {@code groupId} of the Quarkus platform BOM. + */ + @Parameter(property = "platformGroupId", defaultValue = PLATFORM_DEFAULT_GROUP_ID) + String platformGroupId; + + /** + * The {@code artifactId} of the Quarkus platform BOM. + */ + @Parameter(property = "platformArtifactId", defaultValue = PLATFORM_DEFAULT_ARTIFACT_ID) + String platformArtifactId; + + /** + * The {@code groupId} for the newly created Maven modules. If {@code groupId} is left unset, the {@code groupId} + * from the {@code pom.xml} in the current directory will be used. Otherwise, an exception is thrown. + * + * @since 0.20.0 + */ + @Parameter(property = "groupId") + String groupId; + + /** + * This parameter was introduced to change the property of {@code groupId} parameter from {@code quarkus.groupId} to + * {@code groupId} + * + * @since 1.3.0 + */ + @Deprecated + @Parameter(property = "quarkus.groupId") + String deprecatedGroupId; + + /** + * {@code artifactId} of the runtime module. The {@code artifactId}s of the extension parent + * (${artifactId}-parent) and deployment (${artifactId}-deployment) modules will be based + * on this {@code artifactId} too. + *

    + * Optionally, this value can contain the {@link #artifactIdBase} enclosed in round brackets, e.g. + * {@code my-project-(cool-extension)}, where {@code cool-extension} is an {@link #artifactIdBase} and + * {@code my-project-} is {@link #artifactIdPrefix}. This is a way to avoid defining of {@link #artifactIdPrefix} + * and {@link #artifactIdBase} separately. If no round brackets are present in {@link #artifactId}, + * {@link #artifactIdBase} will be equal to {@link #artifactId} and {@link #artifactIdPrefix} will be an empty + * string. + * + * @since 0.20.0 + */ + @Parameter(property = "artifactId") + String artifactId; + + /** + * This parameter was introduced to change the property of {@code artifactId} parameter from {@code quarkus.artifactId} to + * {@code artifactId} + * + * @since 1.3.0 + */ + @Deprecated + @Parameter(property = "quarkus.artifactId") + String deprecatedArtifactId; + + /** + * A prefix common to all extension artifactIds in the current source tree. If you set {@link #artifactIdPrefix}, + * set also {@link #artifactIdBase}, but do not set {@link #artifactId}. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.artifactIdPrefix", defaultValue = "") + String artifactIdPrefix; + + /** + * The unique part of the {@link #artifactId}. If you set {@link #artifactIdBase}, {@link #artifactIdPrefix} + * may also be set, but not {@link #artifactId}. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.artifactIdBase") + String artifactIdBase; + + /** + * The {@code version} for the newly created Maven modules. If {@code version} is left unset, the {@code version} + * from the {@code pom.xml} in the current directory will be used. Otherwise, an exception is thrown. + * + * @since 0.20.0 + */ + @Parameter(property = "version") + String version; + + /** + * This parameter was introduced to change the property of {@code version} parameter from {@code quarkus.artifactVersion} to + * {@code version} + * + * @since 1.3.0 + */ + @Deprecated + @Parameter(property = "quarkus.artifactVersion") + String deprecatedVersion; + + /** + * The {@code name} of the runtime module. The {@code name}s of the extension parent and deployment modules will be + * based on this {@code name} too. + *

    + * Optionally, this value can contain the {@link #nameBase} enclosed in round brackets, e.g. + * {@code My Project - (Cool Extension)}, where {@code Cool Extension} is a {@link #nameBase} and + * {@code My Project - } is {@link #namePrefix}. This is a way to avoid defining of {@link #namePrefix} and + * {@link #nameBase} separately. If no round brackets are present in {@link #name}, the {@link #nameBase} will be + * equal to {@link #name} and {@link #namePrefix} will be an empty string. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.name") + String name; + + /** + * A prefix common to all extension names in the current source tree. If you set {@link #namePrefix}, set also + * {@link #nameBase}, but do not set {@link #name}. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.namePrefix") + String namePrefix; + + /** + * The unique part of the {@link #name}. If you set {@link #nameBase}, set also {@link #namePrefix}, but do not set + * {@link #name}. + *

    + * If neither {@link #name} nor @{link #nameBase} is set, @{link #nameBase} will be derived from + * {@link #artifactIdBase}. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.nameBase") + String nameBase; + + /** + * A string that will delimit {@link #name} from {@code Parent}, {@code Runtime} and {@code Deployment} tokens in + * the respective modules. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.nameSegmentDelimiter", defaultValue = DEFAULT_NAME_SEGMENT_DELIMITER) + String nameSegmentDelimiter; + + /** + * Base Java package under which Java classes should be created in Runtime and Deployment modules. If not set, the + * Java package will be auto-generated out of {@link #groupId}, {@link #javaPackageInfix} and {@link #artifactId} + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.javaPackageBase") + String javaPackageBase; + + /** + * If {@link #javaPackageBase} is not set explicitly, this infix will be put between package segments taken from + * {@link #groupId} and {@link #artifactId}. + *

+ * Example: Given
+ * <ul>
+ * <li>{@link #groupId} is {@code org.example.quarkus.extensions}</li>
+ * <li>{@link #javaPackageInfix} is {@code foo.bar}</li>
+ * <li>{@link #artifactId} is {@code cool-extension}</li>
+ * </ul>
    + * Then the auto-generated {@link #javaPackageBase} will be + * {@code org.example.quarkus.extensions.foo.bar.cool.extension} + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.javaPackageInfix") + String javaPackageInfix; + + /** + * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the + * Parent module. If {@code grandParentArtifactId} is left unset, the {@code artifactId} from the {@code pom.xml} in + * the current directory will be used. Otherwise, an exception is thrown. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.grandParentArtifactId") + @Deprecated + String grandParentArtifactId; + + /** + * This mojo creates a Maven project consisting of the root POM and two modules: the runtime and the deployment one. + * This parameter configures the {@code artifactId} of the root POM's parent POM. + * If {@code parentArtifactId} is left unset, then if the current directory already includes a {@code pom.xml} + * its {@code artifactId} will be used as the parent's {@code artifactId} and if the current directory does not contain + * any {@code pom.xml} then the generated root {@code pom.xml} won't have any parent. + */ + @Parameter(property = "parentArtifactId") + String parentArtifactId; + + /** + * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the + * Parent module. If {@code grandParentGroupId} is left unset, the {@code groupId} from the {@code pom.xml} in the + * current directory will be used. Otherwise, an exception is thrown. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.grandParentGroupId") + @Deprecated + String grandParentGroupId; + + /** + * This mojo creates a Maven project consisting of the root POM and two modules: the runtime and the deployment one. + * This parameter configures the {@code groupId} of the root POM's parent POM. + * If {@code parentGroupId} is left unset, then if the current directory already includes a {@code pom.xml} + * its {@code groupId} will be used as the parent's {@code groupId} and if the current directory does not contain + * any {@code pom.xml} then the generated root {@code pom.xml} won't have any parent. + */ + @Parameter(property = "parentGroupId") + String parentGroupId; + + /** + * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the + * Parent module. If {@code grandParentRelativePath} is left unset, the default {@code relativePath} + * {@code "../pom.xml"} is used. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.grandParentRelativePath") + @Deprecated + String grandParentRelativePath; + + /** + * This mojo creates a Maven project consisting of the root POM and two modules: the runtime and the deployment one. + * This parameter configures the {@code relativePath} of the root POM's parent POM. + * If {@code grandParentRelativePath} is left unset, the default {@code relativePath} is used. + */ + @Parameter(property = "parentRelativePath") + String parentRelativePath; + + /** + * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the + * Parent module. If {@code grandParentVersion} is left unset, the {@code version} from the {@code pom.xml} in the + * current directory will be used. Otherwise, an exception is thrown. 
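A runnable sketch of the {@code javaPackageBase} derivation documented in the example above; it mirrors the `getJavaPackage(..)` helper defined later in this Mojo (only the wrapper class name is made up):

```java
import java.util.Locale;
import java.util.Stack;
import java.util.stream.Collectors;

import javax.lang.model.SourceVersion;

public class JavaPackageSketch {

    // Mirrors getJavaPackage(..) from CreateExtensionLegacyMojo: groupId + infix + artifactId segments,
    // de-duplicated, lower-cased, with Java keywords suffixed by '_'.
    static String getJavaPackage(String groupId, String javaPackageInfix, String artifactId) {
        final Stack<String> segments = new Stack<>();
        for (String segment : groupId.split("[.\\-]+")) {
            if (segments.isEmpty() || !segments.peek().equals(segment)) {
                segments.add(segment);
            }
        }
        if (javaPackageInfix != null) {
            for (String segment : javaPackageInfix.split("[.\\-]+")) {
                segments.add(segment);
            }
        }
        for (String segment : artifactId.split("[.\\-]+")) {
            if (!segments.contains(segment)) {
                segments.add(segment);
            }
        }
        return segments.stream()
                .map(s -> s.toLowerCase(Locale.ROOT))
                .map(s -> SourceVersion.isKeyword(s) ? s + "_" : s)
                .collect(Collectors.joining("."));
    }

    public static void main(String[] args) {
        // The inputs from the Javadoc example above
        System.out.println(getJavaPackage("org.example.quarkus.extensions", "foo.bar", "cool-extension"));
        // -> org.example.quarkus.extensions.foo.bar.cool.extension
    }
}
```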
+ * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.grandParentVersion") + @Deprecated + String grandParentVersion; + + /** + * This mojo creates a Maven project consisting of the root POM and two modules: the runtime and the deployment one. + * This parameter configures the {@code version} of the root POM's parent POM. + * If {@code parentVersion} is left unset, then if the current directory already includes a {@code pom.xml} + * its {@code version} will be used as the parent's {@code version} and if the current directory does not contain + * any {@code pom.xml} then the generated root {@code pom.xml} won't have any parent. + */ + @Parameter(property = "parentVersion") + String parentVersion; + + /** + * Quarkus version the newly created extension should depend on. If you want to pass a property placeholder, use + * {@code @} instead if {@code $} so that the property is not evaluated by the current mojo - e.g. + * @{quarkus.version} + * + * @since 0.20.0 + */ + @Parameter(defaultValue = DEFAULT_QUARKUS_VERSION, required = true, property = "quarkus.version") + String quarkusVersion; + + /** + * This parameter was introduced to change the property of {@code artifactId} parameter from {@code quarkus.artifactId} to + * {@code artifactId} + * + * @since 1.3.0 + */ + @Deprecated + @Parameter(property = "quarkus.quarkusVersion") + String deprecatedQuarkusVersion; + + /** + * If {@code true} the Maven dependencies in Runtime and Deployment modules will not have their versions set and the + * {@code quarkus-bootstrap-maven-plugin} in the Runtime module will not have its version set and it will have no + * executions configured. If {@code false} the version set in {@link #quarkusVersion} will be used where applicable + * and {@code quarkus-bootstrap-maven-plugin} in the Runtime module will be configured explicitly. If the value is + * {@code null} the parameter will be treated as it was initialized to {@code false}. + * + * @since 0.20.0 + */ + @Parameter(property = "quarkus.assumeManaged") + Boolean assumeManaged; + + /** + * URI prefix to use when looking up FreeMarker templates when generating various source files. You need to touch + * this only if you want to provide your own custom templates. + *

+ * The following URI schemes are supported:
+ * <ul>
+ * <li>{@code classpath:}</li>
+ * <li>{@code file:} (relative to {@link #basedir})</li>
+ * </ul>
+ * These are the template files you may want to provide under your custom {@link #templatesUriBase}:
+ * <ul>
+ * <li>{@code deployment-pom.xml}</li>
+ * <li>{@code integration-test-application.properties}</li>
+ * <li>{@code integration-test-pom.xml}</li>
+ * <li>{@code IT.java}</li>
+ * <li>{@code parent-pom.xml}</li>
+ * <li>{@code Processor.java}</li>
+ * <li>{@code runtime-pom.xml}</li>
+ * <li>{@code Test.java}</li>
+ * <li>{@code TestResource.java}</li>
+ * </ul>
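A minimal sketch of how a custom {@code file:}-based template directory can be layered over the default classpath templates, mirroring the `createTemplateLoader(..)` method further down in this Mojo (editorial illustration only; the class name and directory argument are hypothetical):

```java
import java.io.File;
import java.io.IOException;

import freemarker.cache.ClassTemplateLoader;
import freemarker.cache.FileTemplateLoader;
import freemarker.cache.MultiTemplateLoader;
import freemarker.cache.TemplateLoader;
import freemarker.template.Configuration;

public class CustomTemplatesSketch {

    static Configuration configWithCustomTemplates(File customTemplatesDir) throws IOException {
        // Default templates shipped with the plugin, under classpath:/create-extension-templates
        TemplateLoader defaultLoader = new ClassTemplateLoader(CustomTemplatesSketch.class,
                "/create-extension-templates");
        // Custom templates are consulted first; anything missing falls back to the defaults
        TemplateLoader customLoader = new FileTemplateLoader(customTemplatesDir);
        Configuration cfg = new Configuration(Configuration.VERSION_2_3_28);
        cfg.setTemplateLoader(new MultiTemplateLoader(new TemplateLoader[] { customLoader, defaultLoader }));
        return cfg;
    }
}
```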
    + * Note that you do not need to provide all of them. Files not available in your custom {@link #templatesUriBase} + * will be looked up in the default URI base {@value #DEFAULT_TEMPLATES_URI_BASE}. The default templates are + * maintained here. + * + * @since 0.20.0 + */ + @Parameter(defaultValue = DEFAULT_TEMPLATES_URI_BASE, required = true, property = "quarkus.templatesUriBase") + String templatesUriBase; + + /** + * Encoding to read and write files in the current source tree + * + * @since 0.20.0 + */ + @Parameter(defaultValue = DEFAULT_ENCODING, required = true, property = "quarkus.encoding") + String encoding; + + /** + * Path relative to {@link #basedir} pointing at a {@code pom.xml} file containing the platform BOM (Bill of Materials) that + * manages extension artifacts. If set, the newly created runtime and deployment modules will be added to + * {@code } section of this bom; otherwise the newly created modules will not be added + * to any BOM. + * + * @since 1.7.0.Final + */ + @Parameter(property = "bomPath") + Path bomPath; + + /** + * A version for the entries added to the platform BOM (see {@link #bomPath}). + * If you want to pass a property placeholder, use {@code @} instead if {@code $} so + * that the property is not evaluated by the current mojo - e.g. @{my-project.version} + * + * @since 0.25.0 + */ + @Parameter(property = "quarkus.bomEntryVersion", defaultValue = DEFAULT_BOM_ENTRY_VERSION) + String bomEntryVersion; + + /** + * A list of strings of the form {@code groupId:artifactId:version[:type[:classifier[:scope]]]} representing the + * dependencies that should be added to the generated runtime module and to the platform BOM if it is specified via + * {@link #bomPath}. + *

+ * In case the built-in Maven ${placeholder} expansion does not work well for you (because you e.g.
+ * pass {@link #additionalRuntimeDependencies} via CLI), the Mojo supports a custom @{placeholder}
+ * expansion:
+ * <ul>
+ * <li>@{$} will be expanded to {@code $} - handy for escaping standard placeholders. E.g. to insert
+ * ${quarkus.version} to the platform BOM, you need to pass @{$}{quarkus.version}</li>
+ * <li>@{quarkus.field} will be expanded to whatever value the given {@code field} of this mojo has at
+ * runtime.</li>
+ * <li>Any other @{placeholder} will be resolved using the current project's properties</li>
+ * </ul>
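The custom @{placeholder} expansion can be pictured with a small sketch that mirrors `PLACEHOLDER_PATTERN` and the `replacePlaceholders(..)` method later in this class. The reflection-based @{quarkus.field} branch is omitted for brevity, and the GAV and property names below are made up:

```java
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PlaceholderExpansionSketch {

    // Same pattern as PLACEHOLDER_PATTERN in the Mojo: matches @{...}
    static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("@\\{([^\\}]+)\\}");

    static String expand(String gavtc, Map<String, String> projectProperties) {
        StringBuffer out = new StringBuffer();
        Matcher m = PLACEHOLDER_PATTERN.matcher(gavtc);
        while (m.find()) {
            String key = m.group(1);
            if ("$".equals(key)) {
                // @{$} becomes a literal dollar sign, so @{$}{quarkus.version} ends up as ${quarkus.version}
                m.appendReplacement(out, Matcher.quoteReplacement("$"));
            } else {
                String val = projectProperties.get(key);
                if (val != null) {
                    m.appendReplacement(out, Matcher.quoteReplacement(val));
                }
            }
        }
        m.appendTail(out);
        return out.toString();
    }

    public static void main(String[] args) {
        System.out.println(expand("org.acme:acme-client:@{$}{quarkus.version}", Map.of()));
        // -> org.acme:acme-client:${quarkus.version}
        System.out.println(expand("org.acme:acme-client:@{acme.version}", Map.of("acme.version", "1.2.3")));
        // -> org.acme:acme-client:1.2.3
    }
}
```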
    + * + * @since 0.22.0 + */ + @Parameter(property = "quarkus.additionalRuntimeDependencies") + List additionalRuntimeDependencies; + + /** + * An absolute path or a path relative to {@link #basedir} pointing at a {@code pom.xml} file that should serve as + * a parent for the integration test Maven module this mojo generates. If {@link #itestParentPath} is not set, + * the integration test module will not be generated. + * + * @since 0.22.0 + */ + @Parameter(property = "quarkus.itestParentPath") + Path itestParentPath; + + @Parameter(defaultValue = "${project}", readonly = true) + MavenProject project; + + /** + * The entry point to Aether, i.e. the component doing all the work. + * + * @component + */ + @Component + private RepositorySystem repoSystem; + + /** + * The current repository/network configuration of Maven. + * + * @parameter default-value="${repositorySystemSession}" + * @readonly + */ + @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) + private RepositorySystemSession repoSession; + + /** + * The project's remote repositories to use for the resolution of artifacts and their dependencies. + * + * @parameter default-value="${project.remoteProjectRepositories}" + * @readonly + */ + @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) + private List repos; + + /** + * The version of {@code org.apache.maven.plugins:maven-compiler-plugin} that should be used for + * the extension project. + */ + @Parameter(defaultValue = COMPILER_PLUGIN_DEFAULT_VERSION, required = true, property = "quarkus.mavenCompilerPluginVersion") + String compilerPluginVersion; + + /** + * Indicates whether to generate a unit test class for the extension + * + * @since 1.10.0 + */ + @Parameter(property = "quarkus.generateUnitTest", defaultValue = "true") + boolean generateUnitTest; + + /** + * Indicates whether to generate a dev mode unit test class for the extension + * + * @since 1.10.0 + */ + @Parameter(property = "quarkus.generateDevModeTest", defaultValue = "true") + boolean generateDevModeTest; + + /** + * Indicates whether to generate the extension under the current directory or under a directory based on the + * artifactId + * + */ + @Parameter(property = "quarkus.useCurrentDirectory", defaultValue = "false") + boolean useCurrentDirectory; + + boolean currentProjectIsBaseDir; + + Charset charset; + + @Override + public void execute() throws MojoExecutionException, MojoFailureException { + + if (this.basedir == null) { + currentProjectIsBaseDir = true; + this.basedir = new File(".").getAbsoluteFile(); + } + + if (deprecatedGroupId != null) { + if (groupId != null) { + throw new MojoExecutionException("Either groupId or deprecatedGroupId can be set at a time but got groupId=" + + groupId + " and deprecatedGroupId=" + deprecatedGroupId); + } + groupId = deprecatedGroupId; + } + if (deprecatedArtifactId != null) { + if (artifactId != null) { + throw new MojoExecutionException( + "Either artifactId or deprecatedArtifactId can be set at a time but got artifactId=" + artifactId + + " and deprecatedArtifactId=" + deprecatedArtifactId); + } + artifactId = deprecatedArtifactId; + } + if (deprecatedVersion != null) { + if (version != null) { + throw new MojoExecutionException("Either version or deprecatedVersion can be set at a time but got version=" + + version + " and deprecatedVersion=" + deprecatedVersion); + } + version = deprecatedVersion; + } + if (deprecatedQuarkusVersion != null) { + if (quarkusVersion != null && 
!DEFAULT_QUARKUS_VERSION.equals(quarkusVersion)) { + throw new MojoExecutionException( + "Either quarkusVersion or deprecatedQuarkusVersion can be set at a time but got quarkusVersion=" + + quarkusVersion + " and deprecatedQuarkusVersion=" + deprecatedQuarkusVersion); + } + quarkusVersion = deprecatedQuarkusVersion; + } + + if (artifactId != null) { + artifactIdBase = artifactIdBase(artifactId); + artifactIdPrefix = artifactId.substring(0, artifactId.length() - artifactIdBase.length()); + artifactId = BRACKETS_PATTERN.matcher(artifactId).replaceAll(""); + } else if (artifactIdBase != null) { + artifactId = artifactIdPrefix == null || artifactIdPrefix.isEmpty() ? artifactIdBase + : artifactIdPrefix + artifactIdBase; + } else { + throw new MojoFailureException(String.format( + "Either artifactId or both artifactIdPrefix and artifactIdBase must be specified; found: artifactId=[%s], artifactIdPrefix=[%s], artifactIdBase[%s]", + artifactId, artifactIdPrefix, artifactIdBase)); + } + + if (name != null) { + final int pos = name.lastIndexOf(nameSegmentDelimiter); + if (pos >= 0) { + nameBase = name.substring(pos + nameSegmentDelimiter.length()); + } else { + nameBase = name; + } + namePrefix = name.substring(0, name.length() - nameBase.length()); + } else { + if (nameBase == null) { + nameBase = toCapWords(artifactIdBase); + } + if (namePrefix == null) { + namePrefix = ""; + } + if (nameBase != null && namePrefix != null) { + name = namePrefix + nameBase; + } else { + throw new MojoFailureException("Either name or both namePrefix and nameBase must be specified"); + } + } + + if (bomPath != null) { + bomPath = basedir.toPath().resolve(bomPath); + if (!Files.exists(bomPath)) { + throw new MojoFailureException("bomPath does not exist: " + bomPath); + } + } + + charset = Charset.forName(encoding); + + try { + File rootPom = null; + Model rootModel = null; + boolean importDeploymentBom = true; + boolean setCompilerPluginVersion = true; + boolean setQuarkusVersionProp = true; + if (isCurrentProjectExists()) { + rootPom = getCurrentProjectPom(); + if (rootPom != null && useCurrentDirectory) { + throw new MojoFailureException( + "Cannot add extension under this directory. 
Pom file was found."); + } + rootModel = MojoUtils.readPom(rootPom); + if (!"pom".equals(rootModel.getPackaging())) { + throw new MojoFailureException( + "Can add extension modules only under a project with packaging 'pom'; found: " + + rootModel.getPackaging() + ""); + } + + if (rootPom.equals(project.getFile())) { + importDeploymentBom = !hasQuarkusDeploymentBom(); + setCompilerPluginVersion = !project.getPluginManagement().getPluginsAsMap() + .containsKey(COMPILER_PLUGIN_KEY); + setQuarkusVersionProp = !project.getProperties().containsKey(QUARKUS_VERSION_PROP); + } else { + // aloubyansky: not sure we should support this case and not sure it ever worked properly + // this is about creating an extension project not in the context of the current project from the Maven's plugin perspective + // kind of a pathological use-case from the Maven's perspective, imo + final DefaultArtifact rootArtifact = new DefaultArtifact(getGroupId(rootModel), + rootModel.getArtifactId(), null, rootModel.getPackaging(), getVersion(rootModel)); + try { + final MavenArtifactResolver mvn = MavenArtifactResolver.builder() + .setRepositorySystem(repoSystem) + .setRepositorySystemSession(repoSession) + .setRemoteRepositories(repos) + .setCurrentProject(LocalProject.loadWorkspace(rootPom.getParentFile().toPath())) + .build(); + final ArtifactDescriptorResult rootDescr = mvn.resolveDescriptor(rootArtifact); + importDeploymentBom = !hasQuarkusDeploymentBom(rootDescr.getManagedDependencies()); + // TODO determine whether the compiler plugin is configured for the project + setQuarkusVersionProp = !rootDescr.getProperties().containsKey(QUARKUS_VERSION_PROP); + } catch (Exception e) { + throw new MojoExecutionException("Failed to resolve " + rootArtifact + " descriptor", e); + } + } + } else if (parentRelativePath() != null) { + // aloubyansky: not sure we should support this case, same as above + final File gpPom = getExtensionProjectBaseDir().resolve(parentRelativePath()).normalize() + .toAbsolutePath().toFile(); + if (gpPom.exists()) { + rootPom = gpPom; + rootModel = MojoUtils.readPom(gpPom); + final DefaultArtifact rootArtifact = new DefaultArtifact(getGroupId(rootModel), + rootModel.getArtifactId(), null, rootModel.getPackaging(), getVersion(rootModel)); + try { + final MavenArtifactResolver mvn = MavenArtifactResolver.builder() + .setRepositorySystem(repoSystem) + .setRepositorySystemSession(repoSession) + .setRemoteRepositories(repos) + .setCurrentProject(LocalProject.loadWorkspace(rootPom.getParentFile().toPath())) + .build(); + final ArtifactDescriptorResult rootDescr = mvn.resolveDescriptor(rootArtifact); + importDeploymentBom = !hasQuarkusDeploymentBom(rootDescr.getManagedDependencies()); + // TODO determine whether the compiler plugin is configured for the project + setQuarkusVersionProp = !rootDescr.getProperties().containsKey(QUARKUS_VERSION_PROP); + } catch (Exception e) { + throw new MojoExecutionException("Failed to resolve " + rootArtifact + " descriptor", e); + } + } + } + + final TemplateParams templateParams = getTemplateParams(rootModel); + final Configuration cfg = getTemplateConfig(); + + generateExtensionProjects(cfg, templateParams); + if (setQuarkusVersionProp) { + setQuarkusVersionProp(getExtensionProjectBaseDir().resolve("pom.xml").toFile()); + } + if (importDeploymentBom) { + addQuarkusDeploymentBom(getExtensionProjectBaseDir().resolve("pom.xml").toFile()); + } + if (setCompilerPluginVersion) { + setCompilerPluginVersion(getExtensionProjectBaseDir().resolve("pom.xml").toFile()); + } + if 
(rootModel != null) { + addModules(rootPom.toPath(), templateParams, rootModel); + } + + if (bomPath != null) { + getLog().info( + String.format("Adding [%s] to dependencyManagement in [%s]", templateParams.artifactId, + bomPath)); + List transformations = new ArrayList(); + transformations + .add(Transformation.addManagedDependency(templateParams.groupId, templateParams.artifactId, + templateParams.bomEntryVersion)); + for (Gavtcs gavtcs : templateParams.additionalRuntimeDependencies) { + getLog().info(String.format("Adding [%s] to dependencyManagement in [%s]", gavtcs, bomPath)); + transformations.add(Transformation.addManagedDependency(gavtcs)); + } + final String aId = templateParams.artifactId + "-deployment"; + getLog().info(String.format("Adding [%s] to dependencyManagement in [%s]", aId, bomPath)); + transformations.add(Transformation.addManagedDependency(templateParams.groupId, aId, + templateParams.bomEntryVersion)); + + pomTransformer(bomPath).transform(transformations); + } + if (itestParentPath != null) { + generateItest(cfg, templateParams); + } + } catch (IOException e) { + throw new MojoExecutionException(String.format("Could not read %s", project.getFile()), e); + } catch (TemplateException e) { + throw new MojoExecutionException(String.format("Could not process a FreeMarker template"), e); + } + } + + private void setQuarkusVersionProp(File pom) throws IOException, MojoExecutionException { + pomTransformer(pom.toPath()).transform(Transformation.addProperty(QUARKUS_VERSION_PROP, + quarkusVersion.equals(DEFAULT_QUARKUS_VERSION) ? getPluginVersion() : quarkusVersion)); + } + + private void setCompilerPluginVersion(File pom) throws IOException { + pomTransformer(pom.toPath()).transform(Transformation.addProperty(COMPILER_PLUGIN_VERSION_PROP, compilerPluginVersion)); + pomTransformer(pom.toPath()) + .transform(Transformation.addManagedPlugin( + MojoUtils.plugin(COMPILER_PLUGIN_GROUP_ID, COMPILER_PLUGIN_ARTIFACT_ID, + COMPILER_PLUGIN_VERSION_POM_EXPR))); + } + + private void addQuarkusDeploymentBom(File pom) throws IOException, MojoExecutionException { + addQuarkusDeploymentBom(MojoUtils.readPom(pom), pom); + } + + private void addQuarkusDeploymentBom(Model model, File file) throws IOException, MojoExecutionException { + pomTransformer(file.toPath()) + .transform(Transformation.addManagedDependency( + new Gavtcs(platformGroupId, platformArtifactId, QUARKUS_VERSION_POM_EXPR, "pom", null, "import"))); + } + + private String getPluginVersion() throws MojoExecutionException { + return CreateUtils.resolvePluginInfo(CreateExtensionLegacyMojo.class).getVersion(); + } + + private boolean hasQuarkusDeploymentBom() { + if (project.getDependencyManagement() == null) { + return false; + } + for (org.apache.maven.model.Dependency dep : project.getDependencyManagement().getDependencies()) { + if (dep.getArtifactId().equals("quarkus-core-deployment") + && dep.getGroupId().equals("io.quarkus")) { + // this is not a 100% accurate check but practically valid + return true; + } + } + return false; + } + + private boolean hasQuarkusDeploymentBom(List deps) { + if (deps == null) { + return false; + } + for (Dependency dep : deps) { + if (dep.getArtifact().getArtifactId().equals("quarkus-core-deployment") + && dep.getArtifact().getGroupId().equals("io.quarkus")) { + // this is not a 100% accurate check but practically valid + return true; + } + } + return false; + } + + /** + * @return true if the goal is executed in an existing project + */ + private boolean isCurrentProjectExists() { + return 
currentProjectIsBaseDir ? project.getFile() != null + : Files.exists(basedir.toPath().resolve("pom.xml")); + } + + private File getCurrentProjectPom() { + if (currentProjectIsBaseDir) { + return project.getFile() == null ? new File(project.getBasedir(), "pom.xml") : project.getFile(); + } + return new File(basedir, "pom.xml"); + } + + private Path getExtensionProjectBaseDir() { + if (currentProjectIsBaseDir) { + if (useCurrentDirectory) { + return project.getBasedir() == null ? basedir.toPath() + : project.getBasedir().toPath(); + } + return project.getBasedir() == null ? basedir.toPath().resolve(artifactIdBase) + : project.getBasedir().toPath().resolve(artifactIdBase); + } + return useCurrentDirectory ? basedir.toPath() + : new File(basedir, artifactIdBase).toPath(); + } + + private Path getExtensionRuntimeBaseDir() { + return getExtensionProjectBaseDir().resolve("runtime"); + } + + private Path getExtensionDeploymentBaseDir() { + return getExtensionProjectBaseDir().resolve("deployment"); + } + + void addModules(Path basePomXml, TemplateParams templateParams, Model basePom) + throws IOException, TemplateException, MojoFailureException, MojoExecutionException { + if (!basePom.getModules().contains(templateParams.artifactIdBase)) { + getLog().info(String.format("Adding module [%s] to [%s]", templateParams.artifactIdBase, basePomXml)); + pomTransformer(basePomXml).transform(Transformation.addModule(templateParams.artifactIdBase)); + } + } + + private void generateExtensionProjects(Configuration cfg, TemplateParams templateParams) + throws IOException, TemplateException, MojoExecutionException { + evalTemplate(cfg, "parent-pom.xml", getExtensionProjectBaseDir().resolve("pom.xml"), templateParams); + + Files.createDirectories( + getExtensionRuntimeBaseDir().resolve("src/main/java") + .resolve(templateParams.javaPackageBase.replace('.', '/'))); + evalTemplate(cfg, "runtime-pom.xml", getExtensionRuntimeBaseDir().resolve("pom.xml"), + templateParams); + + evalTemplate(cfg, "deployment-pom.xml", getExtensionDeploymentBaseDir().resolve("pom.xml"), + templateParams); + final Path processorPath = getExtensionDeploymentBaseDir() + .resolve("src/main/java") + .resolve(templateParams.javaPackageBase.replace('.', '/')) + .resolve("deployment") + .resolve(templateParams.artifactIdBaseCamelCase + "Processor.java"); + evalTemplate(cfg, "Processor.java", processorPath, templateParams); + + if (generateUnitTest) { + generateUnitTestClass(cfg, templateParams); + } + + if (generateDevModeTest) { + generateDevModeTestClass(cfg, templateParams); + } + } + + private PomTransformer pomTransformer(Path basePomXml) { + return new PomTransformer(basePomXml, charset); + } + + private TemplateParams getTemplateParams(Model basePom) throws MojoExecutionException { + final TemplateParams templateParams = new TemplateParams(); + + templateParams.artifactId = artifactId; + templateParams.artifactIdPrefix = artifactIdPrefix; + templateParams.artifactIdBase = artifactIdBase; + templateParams.artifactIdBaseCamelCase = toCapCamelCase(templateParams.artifactIdBase); + + if (groupId == null) { + if (basePom == null) { + throw new MojoExecutionException( + "Please provide the desired groupId for the project by setting groupId parameter"); + } + templateParams.groupId = getGroupId(basePom); + } else { + templateParams.groupId = groupId; + } + + if (version == null) { + if (basePom == null) { + throw new MojoExecutionException( + "Please provide the desired version for the project by setting version parameter"); + } + 
templateParams.version = getVersion(basePom); + } else { + templateParams.version = version; + } + + templateParams.namePrefix = namePrefix; + templateParams.nameBase = nameBase; + templateParams.nameSegmentDelimiter = nameSegmentDelimiter; + templateParams.assumeManaged = detectAssumeManaged(); + templateParams.quarkusVersion = QUARKUS_VERSION_POM_EXPR; + templateParams.bomEntryVersion = bomEntryVersion.replace('@', '$'); + + templateParams.grandParentGroupId = parentGroupId() != null ? parentGroupId() : getGroupId(basePom); + templateParams.grandParentArtifactId = parentArtifactId() != null ? parentArtifactId() + : basePom == null ? null : basePom.getArtifactId(); + templateParams.grandParentVersion = parentVersion() != null ? parentVersion() : getVersion(basePom); + templateParams.grandParentRelativePath = parentRelativePath(); + + templateParams.javaPackageBase = javaPackageBase != null ? javaPackageBase + : getJavaPackage(templateParams.groupId, javaPackageInfix, artifactId); + templateParams.additionalRuntimeDependencies = getAdditionalRuntimeDependencies(); + templateParams.bomPathSet = bomPath != null; + return templateParams; + } + + private String parentArtifactId() { + return parentArtifactId == null ? grandParentArtifactId : parentArtifactId; + } + + private String parentGroupId() { + return parentGroupId == null ? grandParentGroupId : parentGroupId; + } + + private String parentVersion() { + return parentVersion == null ? grandParentVersion : parentVersion; + } + + private String parentRelativePath() { + return parentRelativePath == null ? grandParentRelativePath : parentRelativePath; + } + + private Configuration getTemplateConfig() throws IOException { + final Configuration templateCfg = new Configuration(Configuration.VERSION_2_3_28); + templateCfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER); + templateCfg.setTemplateLoader(createTemplateLoader(basedir, templatesUriBase)); + templateCfg.setDefaultEncoding(encoding); + templateCfg.setInterpolationSyntax(Configuration.SQUARE_BRACKET_INTERPOLATION_SYNTAX); + templateCfg.setTagSyntax(Configuration.SQUARE_BRACKET_TAG_SYNTAX); + return templateCfg; + } + + private void generateUnitTestClass(Configuration cfg, TemplateParams model) throws IOException, TemplateException { + final Path unitTest = basedir.toPath() + .resolve(getExtensionTestPath(model.artifactIdBase) + model.javaPackageBase.replace('.', '/') + + "/test/" + model.artifactIdBaseCamelCase + "Test.java"); + + evalTemplate(cfg, "UnitTest.java", unitTest, model); + } + + private void generateDevModeTestClass(Configuration cfg, TemplateParams model) throws IOException, TemplateException { + final Path devModeTest = basedir + .toPath() + .resolve(getExtensionTestPath(model.artifactIdBase) + model.javaPackageBase.replace('.', '/') + + "/test/" + model.artifactIdBaseCamelCase + "DevModeTest.java"); + + evalTemplate(cfg, "DevModeTest.java", devModeTest, model); + + } + + void generateItest(Configuration cfg, TemplateParams model) + throws MojoFailureException, MojoExecutionException, TemplateException, IOException { + final Path itestParentAbsPath = basedir.toPath().resolve(itestParentPath); + try (Reader r = Files.newBufferedReader(itestParentAbsPath, charset)) { + final Model itestParent = new MavenXpp3Reader().read(r); + if (!"pom".equals(itestParent.getPackaging())) { + throw new MojoFailureException( + "Can add an extension integration test only under a project with packagin 'pom'; found: " + + itestParent.getPackaging() + " in " + 
itestParentAbsPath); + } + model.itestParentGroupId = getGroupId(itestParent); + model.itestParentArtifactId = itestParent.getArtifactId(); + model.itestParentVersion = getVersion(itestParent); + model.itestParentRelativePath = "../pom.xml"; + + final Path itestDir = itestParentAbsPath.getParent().resolve(model.artifactIdBase); + evalTemplate(cfg, "integration-test-pom.xml", itestDir.resolve("pom.xml"), model); + evalTemplate(cfg, "integration-test-application.properties", + itestDir.resolve("src/main/resources/application.properties"), model); + + final Path testResourcePath = itestDir.resolve("src/main/java/" + model.javaPackageBase.replace('.', '/') + + "/it/" + model.artifactIdBaseCamelCase + "Resource.java"); + evalTemplate(cfg, "TestResource.java", testResourcePath, model); + final Path testClassDir = itestDir + .resolve("src/test/java/" + model.javaPackageBase.replace('.', '/') + "/it"); + evalTemplate(cfg, "Test.java", testClassDir.resolve(model.artifactIdBaseCamelCase + "Test.java"), + model); + evalTemplate(cfg, "IT.java", testClassDir.resolve(model.artifactIdBaseCamelCase + "IT.java"), + model); + + getLog().info(String.format("Adding module [%s] to [%s]", model.artifactIdBase, itestParentAbsPath)); + pomTransformer(itestParentAbsPath).transform(Transformation.addModule(model.artifactIdBase)); + + } catch (IOException e) { + throw new MojoExecutionException(String.format("Could not read %s", itestParentAbsPath), e); + } catch (XmlPullParserException e) { + throw new MojoExecutionException(String.format("Could not parse %s", itestParentAbsPath), e); + } + } + + private List getAdditionalRuntimeDependencies() { + final List result = new ArrayList<>(); + if (additionalRuntimeDependencies != null && !additionalRuntimeDependencies.isEmpty()) { + for (String rawGavtc : additionalRuntimeDependencies) { + rawGavtc = replacePlaceholders(rawGavtc); + result.add(Gavtcs.of(rawGavtc)); + } + } + return result; + } + + private String replacePlaceholders(String gavtc) { + final StringBuffer transformedGavtc = new StringBuffer(); + final Matcher m = PLACEHOLDER_PATTERN.matcher(gavtc); + while (m.find()) { + final String key = m.group(1); + if ("$".equals(key)) { + m.appendReplacement(transformedGavtc, QUOTED_DOLLAR); + } else if (key.startsWith("quarkus.")) { + final String fieldName = key.substring("quarkus.".length()); + try { + final Field field = this.getClass().getDeclaredField(fieldName); + Object val = field.get(this); + if (val != null) { + m.appendReplacement(transformedGavtc, String.valueOf(val)); + } + } catch (NoSuchFieldException | SecurityException | IllegalArgumentException + | IllegalAccessException e) { + throw new RuntimeException(e); + } + } else { + final Object val = project.getProperties().get(key); + if (val != null) { + m.appendReplacement(transformedGavtc, String.valueOf(val)); + } + } + } + m.appendTail(transformedGavtc); + return transformedGavtc.toString(); + } + + boolean detectAssumeManaged() { + return assumeManaged == null ? false : assumeManaged; + } + + static String getGroupId(Model basePom) { + if (basePom == null) { + return null; + } + return basePom.getGroupId() != null ? basePom.getGroupId() + : basePom.getParent() != null && basePom.getParent().getGroupId() != null + ? basePom.getParent().getGroupId() + : null; + } + + static String getVersion(Model basePom) { + if (basePom == null) { + return null; + } + return basePom.getVersion() != null ? basePom.getVersion() + : basePom.getParent() != null && basePom.getParent().getVersion() != null + ? 
basePom.getParent().getVersion() + : null; + } + + static String toCapCamelCase(String artifactIdBase) { + final StringBuilder sb = new StringBuilder(artifactIdBase.length()); + for (String segment : artifactIdBase.split("[.\\-]+")) { + sb.append(Character.toUpperCase(segment.charAt(0))); + if (segment.length() > 1) { + sb.append(segment.substring(1)); + } + } + return sb.toString(); + } + + static String toCapWords(String artifactIdBase) { + final StringBuilder sb = new StringBuilder(artifactIdBase.length()); + for (String segment : artifactIdBase.split("[.\\-]+")) { + if (sb.length() > 0) { + sb.append(' '); + } + sb.append(Character.toUpperCase(segment.charAt(0))); + if (segment.length() > 1) { + sb.append(segment.substring(1)); + } + } + return sb.toString(); + } + + static String getJavaPackage(String groupId, String javaPackageInfix, String artifactId) { + final Stack segments = new Stack<>(); + for (String segment : groupId.split("[.\\-]+")) { + if (segments.isEmpty() || !segments.peek().equals(segment)) { + segments.add(segment); + } + } + if (javaPackageInfix != null) { + for (String segment : javaPackageInfix.split("[.\\-]+")) { + segments.add(segment); + } + } + for (String segment : artifactId.split("[.\\-]+")) { + if (!segments.contains(segment)) { + segments.add(segment); + } + } + return segments.stream() // + .map(s -> s.toLowerCase(Locale.ROOT)) // + .map(s -> SourceVersion.isKeyword(s) ? s + "_" : s) // + .collect(Collectors.joining(".")); + } + + static TemplateLoader createTemplateLoader(File basedir, String templatesUriBase) throws IOException { + final TemplateLoader defaultLoader = new ClassTemplateLoader(CreateExtensionLegacyMojo.class, + DEFAULT_TEMPLATES_URI_BASE.substring(CLASSPATH_PREFIX.length())); + if (DEFAULT_TEMPLATES_URI_BASE.equals(templatesUriBase)) { + return defaultLoader; + } else if (templatesUriBase.startsWith(CLASSPATH_PREFIX)) { + return new MultiTemplateLoader( // + new TemplateLoader[] { // + new ClassTemplateLoader(CreateExtensionLegacyMojo.class, + templatesUriBase.substring(CLASSPATH_PREFIX.length())), // + defaultLoader // + }); + } else if (templatesUriBase.startsWith(FILE_PREFIX)) { + final Path resolvedTemplatesDir = basedir.toPath().resolve(templatesUriBase.substring(FILE_PREFIX.length())); + return new MultiTemplateLoader( // + new TemplateLoader[] { // + new FileTemplateLoader(resolvedTemplatesDir.toFile()), + defaultLoader // + }); + } else { + throw new IllegalStateException(String.format( + "Cannot handle templatesUriBase '%s'; only value starting with '%s' or '%s' are supported", + templatesUriBase, CLASSPATH_PREFIX, FILE_PREFIX)); + } + } + + static void evalTemplate(Configuration cfg, String templateUri, Path dest, TemplateParams model) + throws IOException, TemplateException { + log.infof("Adding '%s'", dest); + final Template template = cfg.getTemplate(templateUri); + Files.createDirectories(dest.getParent()); + try (Writer out = Files.newBufferedWriter(dest)) { + template.process(model, out); + } + } + + static String artifactIdBase(String artifactId) { + final int lBPos = artifactId.indexOf('('); + final int rBPos = artifactId.indexOf(')'); + if (lBPos >= 0 && rBPos >= 0) { + return artifactId.substring(lBPos + 1, rBPos); + } else { + return artifactId; + } + } + + private String getExtensionTestPath(String artifactIdBase) { + return useCurrentDirectory ? 
"deployment/src/test/java/" : artifactIdBase + "/deployment/src/test/java/"; + } + + public void setItestParentPath(String itestParentPath) { + this.itestParentPath = Paths.get(itestParentPath); + } + + public static class TemplateParams { + String grandParentRelativePath; + String grandParentVersion; + String grandParentArtifactId; + String grandParentGroupId; + String itestParentRelativePath; + String itestParentVersion; + String itestParentArtifactId; + String itestParentGroupId; + String groupId; + String artifactId; + String artifactIdPrefix; + String artifactIdBase; + String artifactIdBaseCamelCase; + String version; + String namePrefix; + String nameBase; + String nameSegmentDelimiter; + String javaPackageBase; + boolean assumeManaged; + String quarkusVersion; + List additionalRuntimeDependencies; + boolean bomPathSet; + String bomEntryVersion; + + public String getJavaPackageBase() { + return javaPackageBase; + } + + public boolean isAssumeManaged() { + return assumeManaged; + } + + public String getArtifactIdPrefix() { + return artifactIdPrefix; + } + + public String getArtifactIdBase() { + return artifactIdBase; + } + + public String getNamePrefix() { + return namePrefix; + } + + public String getNameBase() { + return nameBase; + } + + public String getNameSegmentDelimiter() { + return nameSegmentDelimiter; + } + + public String getArtifactIdBaseCamelCase() { + return artifactIdBaseCamelCase; + } + + public String getQuarkusVersion() { + return quarkusVersion; + } + + public String getGrandParentRelativePath() { + return grandParentRelativePath; + } + + public String getGrandParentVersion() { + return grandParentVersion; + } + + public String getGrandParentArtifactId() { + return grandParentArtifactId; + } + + public String getGrandParentGroupId() { + return grandParentGroupId; + } + + public String getGroupId() { + return groupId; + } + + public String getVersion() { + return version; + } + + public String getArtifactId() { + return artifactId; + } + + public List getAdditionalRuntimeDependencies() { + return additionalRuntimeDependencies; + } + + public boolean isBomPathSet() { + return bomPathSet; + } + + public String getItestParentRelativePath() { + return itestParentRelativePath; + } + + public String getItestParentVersion() { + return itestParentVersion; + } + + public String getItestParentArtifactId() { + return itestParentArtifactId; + } + + public String getItestParentGroupId() { + return itestParentGroupId; + } + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionMojo.java index 34f230dc0a491..337156b17c661 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionMojo.java @@ -1,1162 +1,287 @@ package io.quarkus.maven; +import static io.quarkus.devtools.commands.CreateExtension.extractQuarkiverseExtensionId; +import static io.quarkus.devtools.commands.CreateExtension.isQuarkiverseGroupId; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isEmpty; +import static org.fusesource.jansi.Ansi.ansi; + import java.io.File; import java.io.IOException; -import java.io.Reader; -import java.io.Writer; -import java.lang.reflect.Field; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; -import 
java.util.Locale; -import java.util.Stack; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import javax.lang.model.SourceVersion; -import org.apache.maven.model.Model; -import org.apache.maven.model.io.xpp3.MavenXpp3Reader; +import org.apache.maven.execution.MavenSession; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; -import org.codehaus.plexus.util.xml.pull.XmlPullParserException; -import org.eclipse.aether.RepositorySystem; -import org.eclipse.aether.RepositorySystemSession; -import org.eclipse.aether.artifact.DefaultArtifact; -import org.eclipse.aether.graph.Dependency; -import org.eclipse.aether.repository.RemoteRepository; -import org.eclipse.aether.resolution.ArtifactDescriptorResult; import org.jboss.logging.Logger; -import freemarker.cache.ClassTemplateLoader; -import freemarker.cache.FileTemplateLoader; -import freemarker.cache.MultiTemplateLoader; -import freemarker.cache.TemplateLoader; -import freemarker.template.Configuration; -import freemarker.template.Template; -import freemarker.template.TemplateException; -import freemarker.template.TemplateExceptionHandler; -import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; -import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; -import io.quarkus.maven.utilities.MojoUtils; -import io.quarkus.maven.utilities.PomTransformer; -import io.quarkus.maven.utilities.PomTransformer.Gavtcs; -import io.quarkus.maven.utilities.PomTransformer.Transformation; +import io.quarkus.devtools.commands.CreateExtension; +import io.quarkus.devtools.commands.data.QuarkusCommandException; +import io.quarkus.maven.components.Prompter; /** - * Creates a triple of stub Maven modules (Parent, Runtime and Deployment) to implement a new - * Quarkus Extension. - * - *

- * <h2>Adding into an established source tree</h2>
- * <p>
- * If this Mojo is executed in a directory that contains a {@code pom.xml} file with packaging {@code pom} the newly
- * created Parent module is added as a child module to the existing {@code pom.xml} file.
- *
- * <h2>Creating a source tree from scratch</h2>
- * <p>
- * Executing this Mojo in an empty directory is not supported yet.
+ * Creates the base of a
+ * Quarkus Extension in different layouts depending on the options and the
+ * environment.
+ *
+ * <h2>Create in the quarkus-parent project directory (or the extensions parent dir)</h2>
+ *
+ * It will:
+ * <ul>
+ * <li>generate the new Quarkus extension in the extensions parent as a module (parent, runtime and deployment), with optional
+ * unit test and devmode test.</li>
+ * <li>Optionally, generate the new integration test in the integration tests parent as a module.</li>
+ * <li>add the dependencies to the bom/application/pom.xml.</li>
+ * </ul>
+ *
+ * <h2>Creating a Quarkiverse extension</h2>
+ *
+ * When using -DgroupId=io.quarkiverse.[featureId], the new extension will use the Quarkiverse layout.
+ *
+ * <h2>Creating a standalone extension</h2>
+ *
+ * <ul>
+ * <li>generate the new Quarkus extension in the current directory (parent, runtime and deployment), with optional unit test
+ * and devmode test.</li>
+ * <li>Optionally, generate the new integration test module in the current directory.</li>
+ * </ul>
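The Quarkiverse detection itself is delegated to io.quarkus.devtools.commands.CreateExtension (isQuarkiverseGroupId / extractQuarkiverseExtensionId, statically imported in this file's diff). As a rough, hypothetical illustration of the naming convention only, not the real implementation:

```java
public class QuarkiverseLayoutSketch {

    // Hypothetical re-statement of the convention described above; the real checks live in
    // io.quarkus.devtools.commands.CreateExtension.
    static final String QUARKIVERSE_PREFIX = "io.quarkiverse.";

    static boolean looksLikeQuarkiverse(String groupId) {
        return groupId != null && groupId.startsWith(QUARKIVERSE_PREFIX);
    }

    static String featureId(String groupId) {
        return looksLikeQuarkiverse(groupId) ? groupId.substring(QUARKIVERSE_PREFIX.length()) : null;
    }

    public static void main(String[] args) {
        // -DgroupId=io.quarkiverse.acme would select the Quarkiverse layout for the "acme" extension
        System.out.println(looksLikeQuarkiverse("io.quarkiverse.acme")); // true
        System.out.println(featureId("io.quarkiverse.acme"));           // acme
    }
}
```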
    */ @Mojo(name = "create-extension", requiresProject = false) public class CreateExtensionMojo extends AbstractMojo { - private static final String QUOTED_DOLLAR = Matcher.quoteReplacement("$"); - private static final Logger log = Logger.getLogger(CreateExtensionMojo.class); - private static final Pattern BRACKETS_PATTERN = Pattern.compile("[()]+"); - private static final String CLASSPATH_PREFIX = "classpath:"; - private static final String FILE_PREFIX = "file:"; - - private static final String QUARKUS_VERSION_PROP = "quarkus.version"; - - static final String DEFAULT_ENCODING = "utf-8"; - static final String DEFAULT_QUARKUS_VERSION = "@{" + QUARKUS_VERSION_PROP + "}"; - static final String QUARKUS_VERSION_POM_EXPR = "${" + QUARKUS_VERSION_PROP + "}"; - static final String DEFAULT_BOM_ENTRY_VERSION = "@{project.version}"; - static final String DEFAULT_TEMPLATES_URI_BASE = "classpath:/create-extension-templates"; - static final String DEFAULT_NAME_SEGMENT_DELIMITER = " - "; - static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("@\\{([^\\}]+)\\}"); - - static final String PLATFORM_DEFAULT_GROUP_ID = "io.quarkus"; - static final String PLATFORM_DEFAULT_ARTIFACT_ID = "quarkus-bom"; - - static final String COMPILER_PLUGIN_VERSION_PROP = "compiler-plugin.version"; - static final String COMPILER_PLUGIN_VERSION_POM_EXPR = "${" + COMPILER_PLUGIN_VERSION_PROP + "}"; - static final String COMPILER_PLUGIN_DEFAULT_VERSION = "3.8.1"; - private static final String COMPILER_PLUGIN_GROUP_ID = "org.apache.maven.plugins"; - private static final String COMPILER_PLUGIN_ARTIFACT_ID = "maven-compiler-plugin"; - private static final String COMPILER_PLUGIN_KEY = COMPILER_PLUGIN_GROUP_ID + ":" - + COMPILER_PLUGIN_ARTIFACT_ID; - /** - * Directory where the changes should be performed. Default is the current directory of the current Java process. - * - * @since 0.20.0 + * Directory where the changes should be performed. + *
    + *
    + * Default: the current directory of the current Java process. */ - @Parameter(property = "quarkus.basedir") + @Parameter(property = "basedir") File basedir; /** - * The {@code groupId} of the Quarkus platform BOM. - */ - @Parameter(property = "platformGroupId", defaultValue = PLATFORM_DEFAULT_GROUP_ID) - String platformGroupId; - - /** - * The {@code artifactId} of the Quarkus platform BOM. + * {@code extensionId} of this extension (REQUIRED). + *
    + *
    + * It will be used to generate the different extension modules artifactIds + * ([namespaceId][extensionId]-parent), runtime ([namespaceId][extensionId]) and deployment + * ([namespaceId][extensionId]-deployment). */ - @Parameter(property = "platformArtifactId", defaultValue = PLATFORM_DEFAULT_ARTIFACT_ID) - String platformArtifactId; + @Parameter(property = "extensionId") + String extensionId; /** - * The {@code groupId} for the newly created Maven modules. If {@code groupId} is left unset, the {@code groupId} - * from the {@code pom.xml} in the current directory will be used. Otherwise, an exception is thrown. - * - * @since 0.20.0 + * The {@code groupId} for the newly created Maven modules (REQUIRED - INHERITED IN QUARKUS-CORE). */ @Parameter(property = "groupId") String groupId; /** - * This parameter was introduced to change the property of {@code groupId} parameter from {@code quarkus.groupId} to - * {@code groupId} - * - * @since 1.3.0 - */ - @Deprecated - @Parameter(property = "quarkus.groupId") - String deprecatedGroupId; - - /** - * {@code artifactId} of the runtime module. The {@code artifactId}s of the extension parent - * (${artifactId}-parent) and deployment (${artifactId}-deployment) modules will be based - * on this {@code artifactId} too. - *

    - * Optionally, this value can contain the {@link #artifactIdBase} enclosed in round brackets, e.g. - * {@code my-project-(cool-extension)}, where {@code cool-extension} is an {@link #artifactIdBase} and - * {@code my-project-} is {@link #artifactIdPrefix}. This is a way to avoid defining of {@link #artifactIdPrefix} - * and {@link #artifactIdBase} separately. If no round brackets are present in {@link #artifactId}, - * {@link #artifactIdBase} will be equal to {@link #artifactId} and {@link #artifactIdPrefix} will be an empty - * string. - * - * @since 0.20.0 + * Quarkus version the newly created extension should depend on (REQUIRED - INHERITED IN QUARKUS-CORE). */ - @Parameter(property = "artifactId") - String artifactId; - - /** - * This parameter was introduced to change the property of {@code artifactId} parameter from {@code quarkus.artifactId} to - * {@code artifactId} - * - * @since 1.3.0 - */ - @Deprecated - @Parameter(property = "quarkus.artifactId") - String deprecatedArtifactId; - - /** - * A prefix common to all extension artifactIds in the current source tree. If you set {@link #artifactIdPrefix}, - * set also {@link #artifactIdBase}, but do not set {@link #artifactId}. - * - * @since 0.20.0 - */ - @Parameter(property = "quarkus.artifactIdPrefix", defaultValue = "") - String artifactIdPrefix; + @Parameter(property = "quarkusVersion") + String quarkusVersion; /** - * The unique part of the {@link #artifactId}. If you set {@link #artifactIdBase}, {@link #artifactIdPrefix} - * may also be set, but not {@link #artifactId}. - * - * @since 0.20.0 + * A prefix common to all extension artifactIds in the current source tree. + *
    + *
    + * Default: "quarkus-" in quarkus Quarkus Core and Quarkiverse else "" */ - @Parameter(property = "quarkus.artifactIdBase") - String artifactIdBase; + @Parameter(property = "namespaceId") + String namespaceId; /** - * The {@code version} for the newly created Maven modules. If {@code version} is left unset, the {@code version} - * from the {@code pom.xml} in the current directory will be used. Otherwise, an exception is thrown. - * - * @since 0.20.0 + * The {@code version} for the newly created Maven modules. + *
    + *
    + * Default: automatic in Quarkus Core else {@link CreateExtension#DEFAULT_VERSION} */ @Parameter(property = "version") String version; /** - * This parameter was introduced to change the property of {@code version} parameter from {@code quarkus.artifactVersion} to - * {@code version} - * - * @since 1.3.0 - */ - @Deprecated - @Parameter(property = "quarkus.artifactVersion") - String deprecatedVersion; - - /** - * The {@code name} of the runtime module. The {@code name}s of the extension parent and deployment modules will be + * The {@code extensionName} of the runtime module. The {@code extensionName}s of the extension parent and deployment + * modules will be * based on this {@code name} too. - *

    - * Optionally, this value can contain the {@link #nameBase} enclosed in round brackets, e.g. - * {@code My Project - (Cool Extension)}, where {@code Cool Extension} is a {@link #nameBase} and - * {@code My Project - } is {@link #namePrefix}. This is a way to avoid defining of {@link #namePrefix} and - * {@link #nameBase} separately. If no round brackets are present in {@link #name}, the {@link #nameBase} will be - * equal to {@link #name} and {@link #namePrefix} will be an empty string. - * - * @since 0.20.0 + *
    + *
    + * Default: the formatted {@code extensionId} */ - @Parameter(property = "quarkus.name") - String name; + @Parameter(property = "extensionName") + String extensionName; /** - * A prefix common to all extension names in the current source tree. If you set {@link #namePrefix}, set also - * {@link #nameBase}, but do not set {@link #name}. - * - * @since 0.20.0 + * A prefix common to all extension names in the current source tree. + *
    + *
    + * Default: "quarkus-" in quarkus Quarkus Core and Quarkiverse else "" */ - @Parameter(property = "quarkus.namePrefix") - String namePrefix; + @Parameter(property = "namespaceName") + String namespaceName; /** - * The unique part of the {@link #name}. If you set {@link #nameBase}, set also {@link #namePrefix}, but do not set - * {@link #name}. - *

    - * If neither {@link #name} nor @{link #nameBase} is set, @{link #nameBase} will be derived from - * {@link #artifactIdBase}. - * - * @since 0.20.0 + * Base package under which classes should be created in Runtime and Deployment modules. + *
    + *
    + * Default: auto-generated out of {@link #groupId}, {@link #namespaceId} and {@link #extensionId} */ - @Parameter(property = "quarkus.nameBase") - String nameBase; + @Parameter(property = "packageName") + String packageName; /** - * A string that will delimit {@link #name} from {@code Parent}, {@code Runtime} and {@code Deployment} tokens in - * the respective modules. - * - * @since 0.20.0 - */ - @Parameter(property = "quarkus.nameSegmentDelimiter", defaultValue = DEFAULT_NAME_SEGMENT_DELIMITER) - String nameSegmentDelimiter; - - /** - * Base Java package under which Java classes should be created in Runtime and Deployment modules. If not set, the - * Java package will be auto-generated out of {@link #groupId}, {@link #javaPackageInfix} and {@link #artifactId} - * - * @since 0.20.0 - */ - @Parameter(property = "quarkus.javaPackageBase") - String javaPackageBase; - - /** - * If {@link #javaPackageBase} is not set explicitly, this infix will be put between package segments taken from - * {@link #groupId} and {@link #artifactId}. - *

- * Example: Given
- * <ul>
- * <li>{@link #groupId} is {@code org.example.quarkus.extensions}</li>
- * <li>{@link #javaPackageInfix} is {@code foo.bar}</li>
- * <li>{@link #artifactId} is {@code cool-extension}</li>
- * </ul>
-     * Then the auto-generated {@link #javaPackageBase} will be
-     * {@code org.example.quarkus.extensions.foo.bar.cool.extension}
-     *
-     * @since 0.20.0
-     */
-    @Parameter(property = "quarkus.javaPackageInfix")
-    String javaPackageInfix;
-
-    /**
-     * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the
-     * Parent module. If {@code grandParentArtifactId} is left unset, the {@code artifactId} from the {@code pom.xml} in
-     * the current directory will be used. Otherwise, an exception is thrown.
-     *
-     * @since 0.20.0
-     */
-    @Parameter(property = "quarkus.grandParentArtifactId")
-    String grandParentArtifactId;
-
-    /**
-     * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the
-     * Parent module. If {@code grandParentGroupId} is left unset, the {@code groupId} from the {@code pom.xml} in the
-     * current directory will be used. Otherwise, an exception is thrown.
-     *
-     * @since 0.20.0
-     */
-    @Parameter(property = "quarkus.grandParentGroupId")
-    String grandParentGroupId;
-
-    /**
-     * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the
-     * Parent module. If {@code grandParentRelativePath} is left unset, the default {@code relativePath}
-     * {@code "../pom.xml"} is used.
-     *
-     * @since 0.20.0
-     */
-    @Parameter(property = "quarkus.grandParentRelativePath")
-    String grandParentRelativePath;
-
-    /**
-     * This mojo creates a triple of Maven modules (Parent, Runtime and Deployment). "Grand parent" is the parent of the
-     * Parent module. If {@code grandParentVersion} is left unset, the {@code version} from the {@code pom.xml} in the
-     * current directory will be used. Otherwise, an exception is thrown.
-     *
-     * @since 0.20.0
-     */
-    @Parameter(property = "quarkus.grandParentVersion")
-    String grandParentVersion;
-
-    /**
-     * Quarkus version the newly created extension should depend on. If you want to pass a property placeholder, use
-     * {@code @} instead if {@code $} so that the property is not evaluated by the current mojo - e.g.
-     * @{quarkus.version}
-     *
-     * @since 0.20.0
-     */
-    @Parameter(defaultValue = DEFAULT_QUARKUS_VERSION, required = true, property = "quarkus.version")
-    String quarkusVersion;
-
-    /**
-     * This parameter was introduced to change the property of {@code artifactId} parameter from {@code quarkus.artifactId} to
-     * {@code artifactId}
-     *
-     * @since 1.3.0
+     * The {@code groupId} of the Quarkus platform BOM.
+     *
+     *
+     * Default: {@link CreateExtension#DEFAULT_BOM_GROUP_ID}
      */
-    @Deprecated
-    @Parameter(property = "quarkus.quarkusVersion")
-    String deprecatedQuarkusVersion;
+    @Parameter(property = "quarkusBomGroupId")
+    String quarkusBomGroupId;

     /**
-     * If {@code true} the Maven dependencies in Runtime and Deployment modules will not have their versions set and the
-     * {@code quarkus-bootstrap-maven-plugin} in the Runtime module will not have its version set and it will have no
-     * executions configured. If {@code false} the version set in {@link #quarkusVersion} will be used where applicable
-     * and {@code quarkus-bootstrap-maven-plugin} in the Runtime module will be configured explicitly. If the value is
-     * {@code null} the parameter will be treated as it was initialized to {@code false}.
-     *
-     * @since 0.20.0
+     * The {@code artifactId} of the Quarkus platform BOM.
+     *
+     *
+     * Default: {@link CreateExtension#DEFAULT_BOM_ARTIFACT_ID}
      */
-    @Parameter(property = "quarkus.assumeManaged")
-    Boolean assumeManaged;
+    @Parameter(property = "quarkusBomArtifactId")
+    String quarkusBomArtifactId;

     /**
-     * URI prefix to use when looking up FreeMarker templates when generating various source files. You need to touch
-     * this only if you want to provide your own custom templates.
-     * <p>
-     * The following URI schemes are supported:
-     * <ul>
-     * <li>{@code classpath:}</li>
-     * <li>{@code file:} (relative to {@link #basedir})</li>
-     * </ul>
-     * These are the template files you may want to provide under your custom {@link #templatesUriBase}:
-     * <ul>
-     * <li>{@code deployment-pom.xml}</li>
-     * <li>{@code integration-test-application.properties}</li>
-     * <li>{@code integration-test-pom.xml}</li>
-     * <li>{@code IT.java}</li>
-     * <li>{@code parent-pom.xml}</li>
-     * <li>{@code Processor.java}</li>
-     * <li>{@code runtime-pom.xml}</li>
-     * <li>{@code Test.java}</li>
-     * <li>{@code TestResource.java}</li>
-     * </ul>
-     * Note that you do not need to provide all of them. Files not available in your custom {@link #templatesUriBase}
-     * will be looked up in the default URI base {@value #DEFAULT_TEMPLATES_URI_BASE}. The default templates are
-     * maintained here.
-     *
-     * @since 0.20.0
+     * The {@code version} of the Quarkus platform BOM.
+     *
+     *
+     * Default: {@link CreateExtension#DEFAULT_BOM_VERSION}
      */
-    @Parameter(defaultValue = DEFAULT_TEMPLATES_URI_BASE, required = true, property = "quarkus.templatesUriBase")
-    String templatesUriBase;
+    @Parameter(property = "quarkusBomVersion")
+    String quarkusBomVersion;

     /**
-     * Encoding to read and write files in the current source tree
-     *
-     * @since 0.20.0
+     * Indicates whether to generate a unit test class for the extension
      */
-    @Parameter(defaultValue = DEFAULT_ENCODING, required = true, property = "quarkus.encoding")
-    String encoding;
+    @Parameter(property = "withoutUnitTest")
+    boolean withoutUnitTest;

     /**
-     * Path relative to {@link #basedir} pointing at a {@code pom.xml} file containing the BOM (Bill of Materials) that
-     * manages extension artifacts. If set, the newly created runtime and deployment modules will be added to
-     * {@code <dependencyManagement>} section of this bom; otherwise the newly created modules will not be added
-     * to any BOM.
-     *
-     * @since 1.7.0.Final
+     * Indicates whether to generate integration tests for the extension
      */
-    @Parameter(property = "bomPath")
-    Path bomPath;
+    @Parameter(property = "withoutIntegrationTests")
+    boolean withoutIntegrationTests;

     /**
-     * A version for the entries added to the runtime BOM (see {@link #runtimeBomPath}) and to the deployment BOM (see
-     * {@link #deploymentBomPath}). If you want to pass a property placeholder, use {@code @} instead if {@code $} so
-     * that the property is not evaluated by the current mojo - e.g. @{my-project.version}
-     *
-     * @since 0.25.0
+     * Indicates whether to generate a devmode test for the extension
      */
-    @Parameter(property = "quarkus.bomEntryVersion", defaultValue = DEFAULT_BOM_ENTRY_VERSION)
-    String bomEntryVersion;
+    @Parameter(property = "withoutDevModeTest")
+    boolean withoutDevModeTest;

     /**
-     * A list of strings of the form {@code groupId:artifactId:version[:type[:classifier[:scope]]]} representing the
-     * dependencies that should be added to the generated runtime module and to the runtime BOM if it is specified via
-     * {@link #runtimeBomPath}.
-     * <p>
-     * In case the built-in Maven ${placeholder} expansion does not work well for you (because you e.g.
-     * pass {@link #additionalRuntimeDependencies}) via CLI, the Mojo supports a custom @{placeholder}
-     * expansion:
-     * <ul>
-     * <li>@{$} will be expanded to {@code $} - handy for escaping standard placeholders. E.g. to insert
-     * ${quarkus.version} to the BOM, you need to pass @{$}{quarkus.version}</li>
-     * <li>@{quarkus.field} will be expanded to whatever value the given {@code field} of this mojo has at
-     * runtime.</li>
-     * <li>Any other @{placeholder} will be resolved using the current project's properties</li>
-     * </ul>
    - * - * @since 0.22.0 + * Indicates whether to generate any tests for the extension (same as + * -DwithoutUnitTest -DwithoutIntegrationTest -DwithoutDevModeTest) */ - @Parameter(property = "quarkus.additionalRuntimeDependencies") - List additionalRuntimeDependencies; + @Parameter(property = "withoutTests") + boolean withoutTests; /** - * An absolute path or a path relative to {@link #basedir} pointing at a {@code pom.xml} file that should serve as - * a parent for the integration test Maven module this mojo generates. If {@link #itestParentPath} is not set, - * the integration test module will not be generated. - * - * @since 0.22.0 + * Used to detect legacy command usage and display an error */ - @Parameter(property = "quarkus.itestParentPath") - Path itestParentPath; - - @Parameter(defaultValue = "${project}", readonly = true) - MavenProject project; + @Parameter(property = "artifactId") + String artifactId; - /** - * The entry point to Aether, i.e. the component doing all the work. - * - * @component - */ @Component - private RepositorySystem repoSystem; + private Prompter prompter; - /** - * The current repository/network configuration of Maven. - * - * @parameter default-value="${repositorySystemSession}" - * @readonly - */ - @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) - private RepositorySystemSession repoSession; + @Parameter(defaultValue = "${project}") + protected MavenProject project; - /** - * The project's remote repositories to use for the resolution of artifacts and their dependencies. - * - * @parameter default-value="${project.remoteProjectRepositories}" - * @readonly - */ - @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) - private List repos; - - /** - * The version of {@code org.apache.maven.plugins:maven-compiler-plugin} that should be used for - * the extension project. 
- */ - @Parameter(defaultValue = COMPILER_PLUGIN_DEFAULT_VERSION, required = true, property = "quarkus.mavenCompilerPluginVersion") - String compilerPluginVersion; - - boolean currentProjectIsBaseDir; - - Charset charset; + @Parameter(defaultValue = "${session}") + private MavenSession session; @Override - public void execute() throws MojoExecutionException, MojoFailureException { - - if (this.basedir == null) { - currentProjectIsBaseDir = true; - this.basedir = new File(".").getAbsoluteFile(); - } - - if (deprecatedGroupId != null) { - if (groupId != null) { - throw new MojoExecutionException("Either groupId or deprecatedGroupId can be set at a time but got groupId=" - + groupId + " and deprecatedGroupId=" + deprecatedGroupId); - } - groupId = deprecatedGroupId; - } - if (deprecatedArtifactId != null) { - if (artifactId != null) { - throw new MojoExecutionException( - "Either artifactId or deprecatedArtifactId can be set at a time but got artifactId=" + artifactId - + " and deprecatedArtifactId=" + deprecatedArtifactId); - } - artifactId = deprecatedArtifactId; - } - if (deprecatedVersion != null) { - if (version != null) { - throw new MojoExecutionException("Either version or deprecatedVersion can be set at a time but got version=" - + version + " and deprecatedVersion=" + deprecatedVersion); - } - version = deprecatedVersion; - } - if (deprecatedQuarkusVersion != null) { - if (quarkusVersion != null && !DEFAULT_QUARKUS_VERSION.equals(quarkusVersion)) { - throw new MojoExecutionException( - "Either quarkusVersion or deprecatedQuarkusVersion can be set at a time but got quarkusVersion=" - + quarkusVersion + " and deprecatedQuarkusVersion=" + deprecatedQuarkusVersion); - } - quarkusVersion = deprecatedQuarkusVersion; - } - - if (artifactId != null) { - artifactIdBase = artifactIdBase(artifactId); - artifactIdPrefix = artifactId.substring(0, artifactId.length() - artifactIdBase.length()); - artifactId = BRACKETS_PATTERN.matcher(artifactId).replaceAll(""); - } else if (artifactIdBase != null) { - artifactId = artifactIdPrefix == null || artifactIdPrefix.isEmpty() ? 
artifactIdBase - : artifactIdPrefix + artifactIdBase; - } else { - throw new MojoFailureException(String.format( - "Either artifactId or both artifactIdPrefix and artifactIdBase must be specified; found: artifactId=[%s], artifactIdPrefix=[%s], artifactIdBase[%s]", - artifactId, artifactIdPrefix, artifactIdBase)); - } - - if (name != null) { - final int pos = name.lastIndexOf(nameSegmentDelimiter); - if (pos >= 0) { - nameBase = name.substring(pos + nameSegmentDelimiter.length()); - } else { - nameBase = name; - } - namePrefix = name.substring(0, name.length() - nameBase.length()); - } else { - if (nameBase == null) { - nameBase = toCapWords(artifactIdBase); - } - if (namePrefix == null) { - namePrefix = ""; - } - if (nameBase != null && namePrefix != null) { - name = namePrefix + nameBase; - } else { - throw new MojoFailureException("Either name or both namePrefix and nameBase must be specified"); - } - } - - if (bomPath != null) { - bomPath = basedir.toPath().resolve(bomPath); - if (!Files.exists(bomPath)) { - throw new MojoFailureException("runtimeBomPath does not exist: " + bomPath); - } - } - - charset = Charset.forName(encoding); - - try { - File rootPom = null; - Model rootModel = null; - boolean importDeploymentBom = true; - boolean setCompilerPluginVersion = true; - boolean setQuarkusVersionProp = true; - if (isCurrentProjectExists()) { - rootPom = getCurrentProjectPom(); - rootModel = MojoUtils.readPom(rootPom); - if (!"pom".equals(rootModel.getPackaging())) { - throw new MojoFailureException( - "Can add extension modules only under a project with packaging 'pom'; found: " - + rootModel.getPackaging() + ""); - } - - if (rootPom.equals(project.getFile())) { - importDeploymentBom = !hasQuarkusDeploymentBom(); - setCompilerPluginVersion = !project.getPluginManagement().getPluginsAsMap() - .containsKey(COMPILER_PLUGIN_KEY); - setQuarkusVersionProp = !project.getProperties().containsKey(QUARKUS_VERSION_PROP); + public void execute() throws MojoExecutionException { + + if (!isBlank(artifactId)) { + if (isBlank(extensionId)) { + getLog().warn(ansi().a( + "the given 'artifactId' has been automatically converted to 'extensionId' for backward compatibility.") + .toString()); + if (artifactId.startsWith("quarkus-")) { + namespaceId = "quarkus-"; + extensionId = artifactId.replace("quarkus-", ""); } else { - // aloubyansky: not sure we should support this case and not sure it ever worked properly - // this is about creating an extension project not in the context of the current project from the Maven's plugin perspective - // kind of a pathological use-case from the Maven's perspective, imo - final DefaultArtifact rootArtifact = new DefaultArtifact(getGroupId(rootModel), - rootModel.getArtifactId(), null, rootModel.getPackaging(), getVersion(rootModel)); - try { - final MavenArtifactResolver mvn = MavenArtifactResolver.builder() - .setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos) - .setCurrentProject(LocalProject.loadWorkspace(rootPom.getParentFile().toPath())) - .build(); - final ArtifactDescriptorResult rootDescr = mvn.resolveDescriptor(rootArtifact); - importDeploymentBom = !hasQuarkusDeploymentBom(rootDescr.getManagedDependencies()); - // TODO determine whether the compiler plugin is configured for the project - setQuarkusVersionProp = !rootDescr.getProperties().containsKey(QUARKUS_VERSION_PROP); - } catch (Exception e) { - throw new MojoExecutionException("Failed to resolve " + rootArtifact + " descriptor", e); - } - } - } else if 
(this.grandParentRelativePath != null) { - // aloubyansky: not sure we should support this case, same as above - final File gpPom = getExtensionProjectBaseDir().resolve(this.grandParentRelativePath).normalize() - .toAbsolutePath().toFile(); - if (gpPom.exists()) { - rootPom = gpPom; - rootModel = MojoUtils.readPom(gpPom); - final DefaultArtifact rootArtifact = new DefaultArtifact(getGroupId(rootModel), - rootModel.getArtifactId(), null, rootModel.getPackaging(), getVersion(rootModel)); - try { - final MavenArtifactResolver mvn = MavenArtifactResolver.builder() - .setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos) - .setCurrentProject(LocalProject.loadWorkspace(rootPom.getParentFile().toPath())) - .build(); - final ArtifactDescriptorResult rootDescr = mvn.resolveDescriptor(rootArtifact); - importDeploymentBom = !hasQuarkusDeploymentBom(rootDescr.getManagedDependencies()); - // TODO determine whether the compiler plugin is configured for the project - setQuarkusVersionProp = !rootDescr.getProperties().containsKey(QUARKUS_VERSION_PROP); - } catch (Exception e) { - throw new MojoExecutionException("Failed to resolve " + rootArtifact + " descriptor", e); - } - } - } - - final TemplateParams templateParams = getTemplateParams(rootModel); - final Configuration cfg = getTemplateConfig(); - - generateExtensionProjects(cfg, templateParams); - if (setQuarkusVersionProp) { - setQuarkusVersionProp(getExtensionProjectBaseDir().resolve("pom.xml").toFile()); - } - if (importDeploymentBom) { - addQuarkusDeploymentBom(getExtensionProjectBaseDir().resolve("pom.xml").toFile()); - } - if (setCompilerPluginVersion) { - setCompilerPluginVersion(getExtensionProjectBaseDir().resolve("pom.xml").toFile()); - } - if (rootModel != null) { - addModules(rootPom.toPath(), templateParams, rootModel); - } - - if (bomPath != null) { - getLog().info( - String.format("Adding [%s] to dependencyManagement in [%s]", templateParams.artifactId, - bomPath)); - List transformations = new ArrayList(); - transformations - .add(Transformation.addManagedDependency(templateParams.groupId, templateParams.artifactId, - templateParams.bomEntryVersion)); - for (Gavtcs gavtcs : templateParams.additionalRuntimeDependencies) { - getLog().info(String.format("Adding [%s] to dependencyManagement in [%s]", gavtcs, bomPath)); - transformations.add(Transformation.addManagedDependency(gavtcs)); + extensionId = artifactId; } - final String aId = templateParams.artifactId + "-deployment"; - getLog().info(String.format("Adding [%s] to dependencyManagement in [%s]", aId, bomPath)); - transformations.add(Transformation.addManagedDependency(templateParams.groupId, aId, - templateParams.bomEntryVersion)); - - pomTransformer(bomPath).transform(transformations); - } - if (itestParentPath != null) { - generateItest(cfg, templateParams); } - } catch (IOException e) { - throw new MojoExecutionException(String.format("Could not read %s", project.getFile()), e); - } catch (TemplateException e) { - throw new MojoExecutionException(String.format("Could not process a FreeMarker template"), e); } - } - private void setQuarkusVersionProp(File pom) throws IOException, MojoExecutionException { - pomTransformer(pom.toPath()).transform(Transformation.addProperty(QUARKUS_VERSION_PROP, - quarkusVersion.equals(DEFAULT_QUARKUS_VERSION) ? 
getPluginVersion() : quarkusVersion)); - } - - private void setCompilerPluginVersion(File pom) throws IOException { - pomTransformer(pom.toPath()).transform(Transformation.addProperty(COMPILER_PLUGIN_VERSION_PROP, compilerPluginVersion)); - pomTransformer(pom.toPath()) - .transform(Transformation.addManagedPlugin( - MojoUtils.plugin(COMPILER_PLUGIN_GROUP_ID, COMPILER_PLUGIN_ARTIFACT_ID, - COMPILER_PLUGIN_VERSION_POM_EXPR))); - } + promptValues(); - private void addQuarkusDeploymentBom(File pom) throws IOException, MojoExecutionException { - addQuarkusDeploymentBom(MojoUtils.readPom(pom), pom); - } + autoComputeQuarkiverseExtensionId(); - private void addQuarkusDeploymentBom(Model model, File file) throws IOException, MojoExecutionException { - pomTransformer(file.toPath()) - .transform(Transformation.addManagedDependency( - new Gavtcs(platformGroupId, platformArtifactId, QUARKUS_VERSION_POM_EXPR, "pom", null, "import"))); - } + final CreateExtension createExtension = new CreateExtension(basedir.toPath()) + .extensionId(extensionId) + .extensionName(extensionName) + .groupId(groupId) + .version(version) + .packageName(packageName) + .namespaceId(namespaceId) + .namespaceName(namespaceName) + .quarkusVersion(quarkusVersion) + .quarkusBomGroupId(quarkusBomGroupId) + .quarkusBomArtifactId(quarkusBomArtifactId) + .quarkusBomGroupId(quarkusBomVersion) + .withoutUnitTest(withoutTests || withoutUnitTest) + .withoutDevModeTest(withoutTests || withoutDevModeTest) + .withoutIntegrationTests(withoutTests || withoutIntegrationTests); - private String getPluginVersion() throws MojoExecutionException { - return CreateUtils.resolvePluginInfo(CreateExtensionMojo.class).getVersion(); - } + boolean success; - private boolean hasQuarkusDeploymentBom() { - if (project.getDependencyManagement() == null) { - return false; - } - for (org.apache.maven.model.Dependency dep : project.getDependencyManagement().getDependencies()) { - if (dep.getArtifactId().equals("quarkus-core-deployment") - && dep.getGroupId().equals("io.quarkus")) { - // this is not a 100% accurate check but practically valid - return true; - } - } - return false; - } - - private boolean hasQuarkusDeploymentBom(List deps) { - if (deps == null) { - return false; - } - for (Dependency dep : deps) { - if (dep.getArtifact().getArtifactId().equals("quarkus-core-deployment") - && dep.getArtifact().getGroupId().equals("io.quarkus")) { - // this is not a 100% accurate check but practically valid - return true; - } - } - return false; - } - - /** - * @return true if the goal is executed in an existing project - */ - private boolean isCurrentProjectExists() { - return currentProjectIsBaseDir ? project.getFile() != null - : Files.exists(basedir.toPath().resolve("pom.xml")); - } - - private File getCurrentProjectPom() { - if (currentProjectIsBaseDir) { - return project.getFile() == null ? new File(project.getBasedir(), "pom.xml") : project.getFile(); + try { + success = createExtension.execute().isSuccess(); + } catch (QuarkusCommandException e) { + throw new MojoExecutionException("Failed to generate Extension", e); } - return new File(basedir, "pom.xml"); - } - private Path getExtensionProjectBaseDir() { - if (currentProjectIsBaseDir) { - return project.getBasedir() == null ? 
basedir.toPath().resolve(artifactIdBase) - : project.getBasedir().toPath().resolve(artifactIdBase); + if (!success) { + throw new MojoExecutionException("Failed to generate Extension"); } - return new File(basedir, artifactIdBase).toPath(); } - private Path getExtensionRuntimeBaseDir() { - return getExtensionProjectBaseDir().resolve("runtime"); - } - - private Path getExtensionDeploymentBaseDir() { - return getExtensionProjectBaseDir().resolve("deployment"); - } - - void addModules(Path basePomXml, TemplateParams templateParams, Model basePom) - throws IOException, TemplateException, MojoFailureException, MojoExecutionException { - if (!basePom.getModules().contains(templateParams.artifactIdBase)) { - getLog().info(String.format("Adding module [%s] to [%s]", templateParams.artifactIdBase, basePomXml)); - pomTransformer(basePomXml).transform(Transformation.addModule(templateParams.artifactIdBase)); + private void autoComputeQuarkiverseExtensionId() { + if (isQuarkiverseGroupId(groupId) && isEmpty(extensionId)) { + extensionId = extractQuarkiverseExtensionId(groupId); } } - private void generateExtensionProjects(Configuration cfg, TemplateParams templateParams) - throws IOException, TemplateException, MojoExecutionException { - evalTemplate(cfg, "parent-pom.xml", getExtensionProjectBaseDir().resolve("pom.xml"), templateParams); - - Files.createDirectories( - getExtensionRuntimeBaseDir().resolve("src/main/java") - .resolve(templateParams.javaPackageBase.replace('.', '/'))); - evalTemplate(cfg, "runtime-pom.xml", getExtensionRuntimeBaseDir().resolve("pom.xml"), - templateParams); - - evalTemplate(cfg, "deployment-pom.xml", getExtensionDeploymentBaseDir().resolve("pom.xml"), - templateParams); - final Path processorPath = getExtensionDeploymentBaseDir() - .resolve("src/main/java") - .resolve(templateParams.javaPackageBase.replace('.', '/')) - .resolve("deployment") - .resolve(templateParams.artifactIdBaseCamelCase + "Processor.java"); - evalTemplate(cfg, "Processor.java", processorPath, templateParams); - } - - private PomTransformer pomTransformer(Path basePomXml) { - return new PomTransformer(basePomXml, charset); - } - - private TemplateParams getTemplateParams(Model basePom) throws MojoExecutionException { - final TemplateParams templateParams = new TemplateParams(); - - templateParams.artifactId = artifactId; - templateParams.artifactIdPrefix = artifactIdPrefix; - templateParams.artifactIdBase = artifactIdBase; - templateParams.artifactIdBaseCamelCase = toCapCamelCase(templateParams.artifactIdBase); - - if (groupId == null) { - if (basePom == null) { - throw new MojoExecutionException( - "Please provide the desired groupId for the project by setting groupId parameter"); - } - templateParams.groupId = getGroupId(basePom); - } else { - templateParams.groupId = groupId; - } - - if (version == null) { - if (basePom == null) { - throw new MojoExecutionException( - "Please provide the desired version for the project by setting version parameter"); - } - templateParams.version = getVersion(basePom); - } else { - templateParams.version = version; - } - - templateParams.namePrefix = namePrefix; - templateParams.nameBase = nameBase; - templateParams.nameSegmentDelimiter = nameSegmentDelimiter; - templateParams.assumeManaged = detectAssumeManaged(); - templateParams.quarkusVersion = QUARKUS_VERSION_POM_EXPR; - templateParams.bomEntryVersion = bomEntryVersion.replace('@', '$'); - - if (basePom != null) { - templateParams.grandParentGroupId = grandParentGroupId != null ? 
grandParentGroupId : getGroupId(basePom); - templateParams.grandParentArtifactId = grandParentArtifactId != null ? grandParentArtifactId - : basePom.getArtifactId(); - templateParams.grandParentVersion = grandParentVersion != null ? grandParentVersion : getVersion(basePom); - templateParams.grandParentRelativePath = grandParentRelativePath != null ? grandParentRelativePath : "../pom.xml"; - } - - templateParams.javaPackageBase = javaPackageBase != null ? javaPackageBase - : getJavaPackage(templateParams.groupId, javaPackageInfix, artifactId); - templateParams.additionalRuntimeDependencies = getAdditionalRuntimeDependencies(); - templateParams.runtimeBomPathSet = bomPath != null; - return templateParams; - } - - private Configuration getTemplateConfig() throws IOException { - final Configuration templateCfg = new Configuration(Configuration.VERSION_2_3_28); - templateCfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER); - templateCfg.setTemplateLoader(createTemplateLoader(basedir, templatesUriBase)); - templateCfg.setDefaultEncoding(encoding); - templateCfg.setInterpolationSyntax(Configuration.SQUARE_BRACKET_INTERPOLATION_SYNTAX); - templateCfg.setTagSyntax(Configuration.SQUARE_BRACKET_TAG_SYNTAX); - return templateCfg; - } - - void generateItest(Configuration cfg, TemplateParams model) - throws MojoFailureException, MojoExecutionException, TemplateException, IOException { - final Path itestParentAbsPath = basedir.toPath().resolve(itestParentPath); - try (Reader r = Files.newBufferedReader(itestParentAbsPath, charset)) { - final Model itestParent = new MavenXpp3Reader().read(r); - if (!"pom".equals(itestParent.getPackaging())) { - throw new MojoFailureException( - "Can add an extension integration test only under a project with packagin 'pom'; found: " - + itestParent.getPackaging() + " in " + itestParentAbsPath); - } - model.itestParentGroupId = getGroupId(itestParent); - model.itestParentArtifactId = itestParent.getArtifactId(); - model.itestParentVersion = getVersion(itestParent); - model.itestParentRelativePath = "../pom.xml"; - - final Path itestDir = itestParentAbsPath.getParent().resolve(model.artifactIdBase); - evalTemplate(cfg, "integration-test-pom.xml", itestDir.resolve("pom.xml"), model); - evalTemplate(cfg, "integration-test-application.properties", - itestDir.resolve("src/main/resources/application.properties"), model); - - final Path testResourcePath = itestDir.resolve("src/main/java/" + model.javaPackageBase.replace('.', '/') - + "/it/" + model.artifactIdBaseCamelCase + "Resource.java"); - evalTemplate(cfg, "TestResource.java", testResourcePath, model); - final Path testClassDir = itestDir - .resolve("src/test/java/" + model.javaPackageBase.replace('.', '/') + "/it"); - evalTemplate(cfg, "Test.java", testClassDir.resolve(model.artifactIdBaseCamelCase + "Test.java"), - model); - evalTemplate(cfg, "IT.java", testClassDir.resolve(model.artifactIdBaseCamelCase + "IT.java"), - model); - - getLog().info(String.format("Adding module [%s] to [%s]", model.artifactIdBase, itestParentAbsPath)); - pomTransformer(itestParentAbsPath).transform(Transformation.addModule(model.artifactIdBase)); - - } catch (IOException e) { - throw new MojoExecutionException(String.format("Could not read %s", itestParentAbsPath), e); - } catch (XmlPullParserException e) { - throw new MojoExecutionException(String.format("Could not parse %s", itestParentAbsPath), e); - } + private String getPluginVersion() throws MojoExecutionException { + return 
CreateUtils.resolvePluginInfo(CreateExtensionLegacyMojo.class).getVersion(); } - private List getAdditionalRuntimeDependencies() { - final List result = new ArrayList<>(); - if (additionalRuntimeDependencies != null && !additionalRuntimeDependencies.isEmpty()) { - for (String rawGavtc : additionalRuntimeDependencies) { - rawGavtc = replacePlaceholders(rawGavtc); - result.add(Gavtcs.of(rawGavtc)); - } + private void promptValues() throws MojoExecutionException { + if (!session.getRequest().isInteractiveMode()) { + return; } - return result; - } + try { - private String replacePlaceholders(String gavtc) { - final StringBuffer transformedGavtc = new StringBuffer(); - final Matcher m = PLACEHOLDER_PATTERN.matcher(gavtc); - while (m.find()) { - final String key = m.group(1); - if ("$".equals(key)) { - m.appendReplacement(transformedGavtc, QUOTED_DOLLAR); - } else if (key.startsWith("quarkus.")) { - final String fieldName = key.substring("quarkus.".length()); - try { - final Field field = this.getClass().getDeclaredField(fieldName); - Object val = field.get(this); - if (val != null) { - m.appendReplacement(transformedGavtc, String.valueOf(val)); - } - } catch (NoSuchFieldException | SecurityException | IllegalArgumentException - | IllegalAccessException e) { - throw new RuntimeException(e); + if (project == null || !project.getArtifactId().endsWith("quarkus-parent")) { + if (isBlank(quarkusVersion)) { + quarkusVersion = getPluginVersion(); } - } else { - final Object val = project.getProperties().get(key); - if (val != null) { - m.appendReplacement(transformedGavtc, String.valueOf(val)); + if (isBlank(groupId)) { + groupId = prompter.promptWithDefaultValue("Set the extension groupId", "org.acme"); } } - } - m.appendTail(transformedGavtc); - return transformedGavtc.toString(); - } - - boolean detectAssumeManaged() { - return assumeManaged == null ? false : assumeManaged; - } - - static String getGroupId(Model basePom) { - return basePom.getGroupId() != null ? basePom.getGroupId() - : basePom.getParent() != null && basePom.getParent().getGroupId() != null - ? basePom.getParent().getGroupId() - : null; - } - - static String getVersion(Model basePom) { - return basePom.getVersion() != null ? basePom.getVersion() - : basePom.getParent() != null && basePom.getParent().getVersion() != null - ? 
basePom.getParent().getVersion() - : null; - } - - static String toCapCamelCase(String artifactIdBase) { - final StringBuilder sb = new StringBuilder(artifactIdBase.length()); - for (String segment : artifactIdBase.split("[.\\-]+")) { - sb.append(Character.toUpperCase(segment.charAt(0))); - if (segment.length() > 1) { - sb.append(segment.substring(1)); - } - } - return sb.toString(); - } - - static String toCapWords(String artifactIdBase) { - final StringBuilder sb = new StringBuilder(artifactIdBase.length()); - for (String segment : artifactIdBase.split("[.\\-]+")) { - if (sb.length() > 0) { - sb.append(' '); - } - sb.append(Character.toUpperCase(segment.charAt(0))); - if (segment.length() > 1) { - sb.append(segment.substring(1)); - } - } - return sb.toString(); - } - - static String getJavaPackage(String groupId, String javaPackageInfix, String artifactId) { - final Stack segments = new Stack<>(); - for (String segment : groupId.split("[.\\-]+")) { - if (segments.isEmpty() || !segments.peek().equals(segment)) { - segments.add(segment); - } - } - if (javaPackageInfix != null) { - for (String segment : javaPackageInfix.split("[.\\-]+")) { - segments.add(segment); - } - } - for (String segment : artifactId.split("[.\\-]+")) { - if (!segments.contains(segment)) { - segments.add(segment); + autoComputeQuarkiverseExtensionId(); + if (isBlank(extensionId)) { + extensionId = prompter.prompt("Set the extension id"); } - } - return segments.stream() // - .map(s -> s.toLowerCase(Locale.ROOT)) // - .map(s -> SourceVersion.isKeyword(s) ? s + "_" : s) // - .collect(Collectors.joining(".")); - } - - static TemplateLoader createTemplateLoader(File basedir, String templatesUriBase) throws IOException { - final TemplateLoader defaultLoader = new ClassTemplateLoader(CreateExtensionMojo.class, - DEFAULT_TEMPLATES_URI_BASE.substring(CLASSPATH_PREFIX.length())); - if (DEFAULT_TEMPLATES_URI_BASE.equals(templatesUriBase)) { - return defaultLoader; - } else if (templatesUriBase.startsWith(CLASSPATH_PREFIX)) { - return new MultiTemplateLoader( // - new TemplateLoader[] { // - new ClassTemplateLoader(CreateExtensionMojo.class, - templatesUriBase.substring(CLASSPATH_PREFIX.length())), // - defaultLoader // - }); - } else if (templatesUriBase.startsWith(FILE_PREFIX)) { - final Path resolvedTemplatesDir = basedir.toPath().resolve(templatesUriBase.substring(FILE_PREFIX.length())); - return new MultiTemplateLoader( // - new TemplateLoader[] { // - new FileTemplateLoader(resolvedTemplatesDir.toFile()), - defaultLoader // - }); - } else { - throw new IllegalStateException(String.format( - "Cannot handle templatesUriBase '%s'; only value starting with '%s' or '%s' are supported", - templatesUriBase, CLASSPATH_PREFIX, FILE_PREFIX)); - } - } - - static void evalTemplate(Configuration cfg, String templateUri, Path dest, TemplateParams model) - throws IOException, TemplateException { - log.infof("Adding '%s'", dest); - final Template template = cfg.getTemplate(templateUri); - Files.createDirectories(dest.getParent()); - try (Writer out = Files.newBufferedWriter(dest)) { - template.process(model, out); - } - } - - static String artifactIdBase(String artifactId) { - final int lBPos = artifactId.indexOf('('); - final int rBPos = artifactId.indexOf(')'); - if (lBPos >= 0 && rBPos >= 0) { - return artifactId.substring(lBPos + 1, rBPos); - } else { - return artifactId; - } - } - - public void setItestParentPath(String itestParentPath) { - this.itestParentPath = Paths.get(itestParentPath); - } - - public static class TemplateParams 
{ - String grandParentRelativePath; - String grandParentVersion; - String grandParentArtifactId; - String grandParentGroupId; - String itestParentRelativePath; - String itestParentVersion; - String itestParentArtifactId; - String itestParentGroupId; - String groupId; - String artifactId; - String artifactIdPrefix; - String artifactIdBase; - String artifactIdBaseCamelCase; - String version; - String namePrefix; - String nameBase; - String nameSegmentDelimiter; - String javaPackageBase; - boolean assumeManaged; - String quarkusVersion; - List additionalRuntimeDependencies; - boolean runtimeBomPathSet; - String bomEntryVersion; - - public String getJavaPackageBase() { - return javaPackageBase; - } - - public boolean isAssumeManaged() { - return assumeManaged; - } - - public String getArtifactIdPrefix() { - return artifactIdPrefix; - } - - public String getArtifactIdBase() { - return artifactIdBase; - } - - public String getNamePrefix() { - return namePrefix; - } - - public String getNameBase() { - return nameBase; - } - - public String getNameSegmentDelimiter() { - return nameSegmentDelimiter; - } - - public String getArtifactIdBaseCamelCase() { - return artifactIdBaseCamelCase; - } - - public String getQuarkusVersion() { - return quarkusVersion; - } - - public String getGrandParentRelativePath() { - return grandParentRelativePath; - } - - public String getGrandParentVersion() { - return grandParentVersion; - } - - public String getGrandParentArtifactId() { - return grandParentArtifactId; - } - - public String getGrandParentGroupId() { - return grandParentGroupId; - } - - public String getGroupId() { - return groupId; - } - - public String getVersion() { - return version; - } - - public String getArtifactId() { - return artifactId; - } - - public List getAdditionalRuntimeDependencies() { - return additionalRuntimeDependencies; - } - - public boolean isRuntimeBomPathSet() { - return runtimeBomPathSet; - } - - public String getItestParentRelativePath() { - return itestParentRelativePath; - } - - public String getItestParentVersion() { - return itestParentVersion; - } - - public String getItestParentArtifactId() { - return itestParentArtifactId; - } - - public String getItestParentGroupId() { - return itestParentGroupId; + } catch (IOException e) { + throw new MojoExecutionException("Unable to get user input", e); } } } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CreateJBangMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CreateJBangMojo.java new file mode 100644 index 0000000000000..347346062a455 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/CreateJBangMojo.java @@ -0,0 +1,129 @@ +package io.quarkus.maven; + +import static org.fusesource.jansi.Ansi.ansi; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Component; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.impl.RemoteRepositoryManager; +import org.eclipse.aether.repository.RemoteRepository; + +import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; +import io.quarkus.devtools.commands.CreateJBangProject; 
+import io.quarkus.devtools.commands.data.QuarkusCommandException; +import io.quarkus.devtools.messagewriter.MessageWriter; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import io.quarkus.platform.tools.maven.MojoMessageWriter; +import io.quarkus.registry.catalog.ExtensionCatalog; + +@Mojo(name = "create-jbang", requiresProject = false) +public class CreateJBangMojo extends AbstractMojo { + + @Parameter(property = "noJBangWrapper", defaultValue = "false") + private boolean noJBangWrapper; + + /** + * Group ID of the target platform BOM + */ + @Parameter(property = "platformGroupId", required = false) + private String bomGroupId; + + /** + * Artifact ID of the target platform BOM + */ + @Parameter(property = "platformArtifactId", required = false) + private String bomArtifactId; + + /** + * Version of the target platform BOM + */ + @Parameter(property = "platformVersion", required = false) + private String bomVersion; + + @Parameter(property = "extensions") + private Set extensions; + + @Parameter(property = "outputDirectory", defaultValue = "${basedir}/jbang-with-quarkus") + private File outputDirectory; + + @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) + private List repos; + + @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) + private RepositorySystemSession repoSession; + + @Component + private RepositorySystem repoSystem; + + @Component + RemoteRepositoryManager remoteRepoManager; + + @Override + public void execute() throws MojoExecutionException { + try { + Files.createDirectories(outputDirectory.toPath()); + } catch (IOException e) { + throw new MojoExecutionException("Could not create directory " + outputDirectory, e); + } + + File projectRoot = outputDirectory; + final Path projectDirPath = projectRoot.toPath(); + + final MavenArtifactResolver mvn; + try { + mvn = MavenArtifactResolver.builder() + .setRepositorySystem(repoSystem) + .setRepositorySystemSession(repoSession) + .setRemoteRepositories(repos) + .setRemoteRepositoryManager(remoteRepoManager) + .build(); + } catch (Exception e) { + throw new MojoExecutionException("Failed to initialize Maven artifact resolver", e); + } + + final MessageWriter log = new MojoMessageWriter(getLog()); + final ExtensionCatalog catalog = CreateProjectMojo.resolveExtensionsCatalog( + StringUtils.defaultIfBlank(bomGroupId, null), + StringUtils.defaultIfBlank(bomArtifactId, null), + StringUtils.defaultIfBlank(bomVersion, null), + QuarkusProjectHelper.getCatalogResolver(mvn, log), mvn, log); + + final CreateJBangProject createJBangProject = new CreateJBangProject(QuarkusProject.of(projectDirPath, catalog, + QuarkusProjectHelper.getResourceLoader(catalog, mvn), log, BuildTool.MAVEN)) + .extensions(extensions) + .setValue("noJBangWrapper", noJBangWrapper); + + boolean success; + + try { + success = createJBangProject.execute().isSuccess(); + } catch (QuarkusCommandException e) { + throw new MojoExecutionException("Failed to generate JBang Quarkus project", e); + } + + if (success) { + getLog().info(""); + getLog().info("========================================================================"); + getLog().warn(ansi().a("Quarkus JBang project is an experimental feature.").toString()); + getLog().info("========================================================================"); + getLog().info(""); + } else { + throw new MojoExecutionException( + "Failed to generate 
JBang Quarkus project"); + } + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java index c8f45861476c0..0b77f4d46c119 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java @@ -1,56 +1,55 @@ package io.quarkus.maven; import static org.fusesource.jansi.Ansi.ansi; -import static org.twdata.maven.mojoexecutor.MojoExecutor.artifactId; -import static org.twdata.maven.mojoexecutor.MojoExecutor.configuration; -import static org.twdata.maven.mojoexecutor.MojoExecutor.element; -import static org.twdata.maven.mojoexecutor.MojoExecutor.executeMojo; -import static org.twdata.maven.mojoexecutor.MojoExecutor.executionEnvironment; -import static org.twdata.maven.mojoexecutor.MojoExecutor.goal; -import static org.twdata.maven.mojoexecutor.MojoExecutor.groupId; -import static org.twdata.maven.mojoexecutor.MojoExecutor.name; -import static org.twdata.maven.mojoexecutor.MojoExecutor.plugin; -import static org.twdata.maven.mojoexecutor.MojoExecutor.version; +import java.io.BufferedWriter; import java.io.File; import java.io.IOException; +import java.io.StringWriter; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Locale; import java.util.Objects; -import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.apache.maven.execution.DefaultMavenExecutionRequest; -import org.apache.maven.execution.MavenExecutionRequest; import org.apache.maven.execution.MavenSession; +import org.apache.maven.model.Model; +import org.apache.maven.model.Parent; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.BuildPluginManager; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; -import org.apache.maven.project.DefaultProjectBuildingRequest; import org.apache.maven.project.MavenProject; import org.apache.maven.project.ProjectBuilder; -import org.apache.maven.settings.Proxy; import org.eclipse.aether.RepositorySystem; import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.impl.RemoteRepositoryManager; import org.eclipse.aether.repository.RemoteRepository; import org.fusesource.jansi.Ansi; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.devtools.commands.CreateProject; +import io.quarkus.devtools.messagewriter.MessageWriter; import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.devtools.project.QuarkusProjectHelper; import io.quarkus.devtools.project.codegen.SourceType; import io.quarkus.maven.components.MavenVersionEnforcer; import io.quarkus.maven.components.Prompter; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; +import io.quarkus.maven.utilities.MojoUtils; import io.quarkus.platform.tools.ToolsUtils; +import io.quarkus.platform.tools.maven.MojoMessageWriter; +import io.quarkus.registry.ExtensionCatalogResolver; +import io.quarkus.registry.RegistryResolutionException; +import io.quarkus.registry.catalog.ExtensionCatalog; +import io.quarkus.registry.catalog.Platform; +import io.quarkus.registry.catalog.PlatformCatalog; 
/** * This goal helps in setting up Quarkus Maven project with quarkus-maven-plugin, with sensible defaults @@ -58,9 +57,10 @@ @Mojo(name = "create", requiresProject = false) public class CreateProjectMojo extends AbstractMojo { - final private static boolean IS_WINDOWS = System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("windows"); - - private static final String DEFAULT_GROUP_ID = "org.acme.quarkus.sample"; + private static final String DEFAULT_GROUP_ID = "org.acme"; + private static final String DEFAULT_ARTIFACT_ID = "code-with-quarkus"; + private static final String DEFAULT_VERSION = "1.0.0-SNAPSHOT"; + private static final String DEFAULT_EXTENSIONS = "resteasy"; @Parameter(defaultValue = "${project}") protected MavenProject project; @@ -74,6 +74,18 @@ public class CreateProjectMojo extends AbstractMojo { @Parameter(property = "projectVersion") private String projectVersion; + /** + * When true, do not include any example code in the generated Quarkus project. + */ + @Parameter(property = "noExamples", defaultValue = "false") + private boolean noExamples; + + /** + * Choose which example(s) you want in the generated Quarkus application. + */ + @Parameter(property = "examples") + private Set examples; + /** * Group ID of the target platform BOM */ @@ -92,12 +104,51 @@ public class CreateProjectMojo extends AbstractMojo { @Parameter(property = "platformVersion", required = false) private String bomVersion; + /** + * The {@link #path} will define the REST path of the generated code when picking only one of those extensions resteasy, + * resteasy-reactive and spring-web. + *
    + * If more than one of those extensions are picked, this parameter will be ignored. + *
    + * This is @Deprecated because using a generic path parameters with multiple example does not make sense and lead to + * confusion. + * More info: https://github.com/quarkusio/quarkus/issues/14437 + *
    + * {@code className} + */ @Parameter(property = "path") + @Deprecated private String path; + /** + * The {@link #className} will define the generated class names when picking only one of those extensions resteasy, + * resteasy-reactive and spring-web. + *
    + * If more than one of those extensions are picked, then only the package name part will be used as {@link #packageName} + *
    + * This is @Deprecated because using a generic className parameters with multiple example does not make sense and lead to + * confusion. + * More info: https://github.com/quarkusio/quarkus/issues/14437 + *
    + * By default, the {@link #projectGroupId} is used as package for generated classes (you can also use {@link #packageName} + * to have them different). + *
    + * {@code className} + */ @Parameter(property = "className") + @Deprecated private String className; + /** + * Set the package name of the generated classes. + *
    + * If not set, {@link #projectGroupId} will be used as {@link #packageName} + * + * {@code packageName} + */ + @Parameter(property = "packageName") + private String packageName; + @Parameter(property = "buildTool", defaultValue = "MAVEN") private String buildTool; @@ -131,21 +182,15 @@ public class CreateProjectMojo extends AbstractMojo { @Component private RepositorySystem repoSystem; + @Component + RemoteRepositoryManager remoteRepoManager; + + @Parameter(property = "enableRegistryClient") + private boolean enableRegistryClient; + @Override public void execute() throws MojoExecutionException { - final MavenArtifactResolver mvn; - try { - mvn = MavenArtifactResolver.builder() - .setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos).build(); - } catch (Exception e1) { - throw new MojoExecutionException("Failed to initialize Maven artifact resolver", e1); - } - final QuarkusPlatformDescriptor platform = CreateUtils.resolvePlatformDescriptor(bomGroupId, bomArtifactId, bomVersion, - mvn, getLog()); - // We detect the Maven version during the project generation to indicate the user immediately that the installed // version may not be supported. mavenVersionEnforcer.ensureMavenVersion(getLog(), session); @@ -154,18 +199,74 @@ public void execute() throws MojoExecutionException { } catch (IOException e) { throw new MojoExecutionException("Could not create directory " + outputDirectory, e); } + + final MavenArtifactResolver mvn; + try { + mvn = MavenArtifactResolver.builder() + .setRepositorySystem(repoSystem) + .setRepositorySystemSession(repoSession) + .setRemoteRepositories(repos) + .setRemoteRepositoryManager(remoteRepoManager) + .build(); + } catch (Exception e) { + throw new MojoExecutionException("Failed to initialize Maven artifact resolver", e); + } + final MojoMessageWriter log = new MojoMessageWriter(getLog()); + final ExtensionCatalogResolver catalogResolver = enableRegistryClient + ? QuarkusProjectHelper.getCatalogResolver(mvn, log) + : ExtensionCatalogResolver.empty(); + + final ExtensionCatalog catalog = resolveExtensionsCatalog( + StringUtils.defaultIfBlank(bomGroupId, null), + StringUtils.defaultIfBlank(bomArtifactId, null), + StringUtils.defaultIfBlank(bomVersion, null), + catalogResolver, mvn, log); + File projectRoot = outputDirectory; - File pom = new File(projectRoot, "pom.xml"); + File pom = project != null ? project.getFile() : null; + Model parentPomModel = null; - if (pom.isFile()) { - throw new MojoExecutionException("Unable to generate the project in a directory that already contains a pom.xml"); - } else { - askTheUserForMissingValues(); - projectRoot = new File(outputDirectory, projectArtifactId); - if (projectRoot.exists()) { - throw new MojoExecutionException("Unable to create the project, " + - "the directory " + projectRoot.getAbsolutePath() + " already exists"); + boolean containsAtLeastOneGradleFile = false; + for (String gradleFile : Arrays.asList("build.gradle", "settings.gradle", "build.gradle.kts", "settings.gradle.kts")) { + containsAtLeastOneGradleFile |= new File(projectRoot, gradleFile).isFile(); + } + + BuildTool buildToolEnum = BuildTool.findTool(buildTool); + if (buildToolEnum == null) { + String validBuildTools = String.join(",", + Arrays.asList(BuildTool.values()).stream().map(BuildTool::toString).collect(Collectors.toList())); + throw new IllegalArgumentException("Choose a valid build tool. 
Accepted values are: " + validBuildTools); + } + if (BuildTool.MAVEN.equals(buildToolEnum)) { + if (pom != null && pom.isFile()) { + try { + parentPomModel = MojoUtils.readPom(pom); + if (!"pom".equals(parentPomModel.getPackaging())) { + throw new MojoExecutionException( + "The parent project must have a packaging type of POM. Current packaging: " + + parentPomModel.getPackaging()); + } + } catch (IOException e) { + throw new MojoExecutionException("Could not access parent pom.", e); + } + } else if (containsAtLeastOneGradleFile) { + throw new MojoExecutionException( + "You are trying to create maven project in a directory that contains only gradle build files."); } + } else if (BuildTool.GRADLE.equals(buildToolEnum) || BuildTool.GRADLE_KOTLIN_DSL.equals(buildToolEnum)) { + if (containsAtLeastOneGradleFile) { + throw new MojoExecutionException("Adding subprojects to gradle projects is not implemented."); + } else if (pom != null && pom.isFile()) { + throw new MojoExecutionException( + "You are trying to create gradle project in a directory that contains only maven build files."); + } + } + + askTheUserForMissingValues(); + projectRoot = new File(outputDirectory, projectArtifactId); + if (projectRoot.exists()) { + throw new MojoExecutionException("Unable to create the project, " + + "the directory " + projectRoot.getAbsolutePath() + " already exists"); } boolean success; @@ -175,33 +276,37 @@ public void execute() throws MojoExecutionException { final SourceType sourceType = CreateProject.determineSourceType(extensions); sanitizeOptions(sourceType); - BuildTool buildToolEnum; - try { - buildToolEnum = BuildTool.valueOf(buildTool.toUpperCase()); - } catch (IllegalArgumentException e) { - String validBuildTools = String.join(",", - Arrays.asList(BuildTool.values()).stream().map(BuildTool::toString).collect(Collectors.toList())); - throw new IllegalArgumentException("Choose a valid build tool. 
Accepted values are: " + validBuildTools); - } - final CreateProject createProject = new CreateProject(projectDirPath, platform) - .buildTool(buildToolEnum) + QuarkusProject newProject = QuarkusProject.of(projectDirPath, catalog, + QuarkusProjectHelper.getResourceLoader(catalog, mvn), log, buildToolEnum); + final CreateProject createProject = new CreateProject(newProject) .groupId(projectGroupId) .artifactId(projectArtifactId) .version(projectVersion) .sourceType(sourceType) .className(className) - .extensions(extensions); + .packageName(packageName) + .extensions(extensions) + .overrideExamples(examples) + .noExamples(noExamples); if (path != null) { createProject.setValue("path", path); } success = createProject.execute().isSuccess(); - - File createdDependenciesBuildFile = new File(projectRoot, buildToolEnum.getDependenciesFile()); - if (BuildTool.MAVEN.equals(buildToolEnum)) { - createMavenWrapper(createdDependenciesBuildFile, ToolsUtils.readQuarkusProperties(platform)); - } else if (BuildTool.GRADLE.equals(buildToolEnum)) { - createGradleWrapper(platform, projectDirPath); + if (success && parentPomModel != null && BuildTool.MAVEN.equals(buildToolEnum)) { + // Write to parent pom and submodule pom if project creation is successful + if (!parentPomModel.getModules().contains(this.projectArtifactId)) { + parentPomModel.addModule(this.projectArtifactId); + } + File subModulePomFile = new File(projectRoot, buildToolEnum.getDependenciesFile()); + Model subModulePomModel = MojoUtils.readPom(subModulePomFile); + Parent parent = new Parent(); + parent.setGroupId(parentPomModel.getGroupId()); + parent.setArtifactId(parentPomModel.getArtifactId()); + parent.setVersion(parentPomModel.getVersion()); + subModulePomModel.setParent(parent); + MojoUtils.write(parentPomModel, pom); + MojoUtils.write(subModulePomModel, subModulePomFile); } } catch (Exception e) { throw new MojoExecutionException("Failed to generate Quarkus project", e); @@ -214,80 +319,70 @@ public void execute() throws MojoExecutionException { } } - private void createGradleWrapper(QuarkusPlatformDescriptor platform, Path projectDirPath) { - try { - Files.createDirectories(projectDirPath.resolve("gradle/wrapper")); - - for (String filename : CreateUtils.GRADLE_WRAPPER_FILES) { - byte[] fileContent = platform.loadResource(CreateUtils.GRADLE_WRAPPER_PATH + '/' + filename, - is -> { - byte[] buffer = new byte[is.available()]; - is.read(buffer); - return buffer; - }); - final Path destination = projectDirPath.resolve(filename); - Files.write(destination, fileContent); - } - - projectDirPath.resolve("gradlew").toFile().setExecutable(true); - projectDirPath.resolve("gradlew.bat").toFile().setExecutable(true); - } catch (IOException e) { - getLog().error("Unable to copy Gradle wrapper from platform descriptor", e); + static ExtensionCatalog resolveExtensionsCatalog(String groupId, String artifactId, String version, + ExtensionCatalogResolver catalogResolver, MavenArtifactResolver artifactResolver, MessageWriter log) + throws MojoExecutionException { + + if (!catalogResolver.hasRegistries()) { + // TODO: this should normally result in an error, however for the time being + // until we get the registry service up and running this will allow + // a fall back to the legacy way of resolving the default platform catalog directly + return ToolsUtils.resolvePlatformDescriptorDirectly(groupId, artifactId, + version == null ? 
CreateUtils.resolvePluginInfo(CreateUtils.class).getVersion() : version, artifactResolver, + log); } - } - private void createMavenWrapper(File createdPomFile, Properties props) { try { - // we need to modify the maven environment used by the wrapper plugin since the project could have been - // created in a directory other than the current - MavenProject newProject = projectBuilder.build( - createdPomFile, new DefaultProjectBuildingRequest(session.getProjectBuildingRequest())).getProject(); - - MavenExecutionRequest newExecutionRequest = DefaultMavenExecutionRequest.copy(session.getRequest()); - newExecutionRequest.setBaseDirectory(createdPomFile.getParentFile()); - - MavenSession newSession = new MavenSession(session.getContainer(), session.getRepositorySession(), - newExecutionRequest, session.getResult()); - newSession.setCurrentProject(newProject); - - setProxySystemPropertiesFromSession(); - - executeMojo( - plugin( - groupId("io.takari"), - artifactId("maven"), - version(ToolsUtils.getMavenWrapperVersion(props))), - goal("wrapper"), - configuration( - element(name("maven"), ToolsUtils.getProposedMavenVersion(props))), - executionEnvironment( - newProject, - newSession, - pluginManager)); - } catch (Exception e) { - // no reason to fail if the wrapper could not be created - getLog().error("Unable to install the Maven wrapper (./mvnw) in the project", e); - } - } - - private void setProxySystemPropertiesFromSession() { - List proxiesFromSession = session.getRequest().getProxies(); - // - takari maven uses https to download the maven wrapper - // - don't do anything if proxy system property is already set - if (!proxiesFromSession.isEmpty() && System.getProperty("https.proxyHost") == null) { - - // use the first active proxy for setting the system properties - proxiesFromSession.stream() - .filter(Proxy::isActive) - .findFirst() - .ifPresent(proxy -> { - // note: a http proxy _is_ usable as https.proxyHost - System.setProperty("https.proxyHost", proxy.getHost()); - System.setProperty("https.proxyPort", String.valueOf(proxy.getPort())); - if (proxy.getNonProxyHosts() != null) { - System.setProperty("http.nonProxyHosts", proxy.getNonProxyHosts()); - } - }); + if (groupId == null && artifactId == null && version == null) { + return catalogResolver.resolveExtensionCatalog(); + } + final PlatformCatalog platformsCatalog = catalogResolver.resolvePlatformCatalog(); + if (platformsCatalog == null) { + throw new MojoExecutionException( + "No platforms are available. Please make sure your .quarkus/config.yaml configuration includes proper extensions registry configuration"); + } + ArtifactCoords matchedBom = null; + List matchedBoms = null; + for (Platform p : platformsCatalog.getPlatforms()) { + final ArtifactCoords bom = p.getBom(); + if (version != null && !bom.getVersion().equals(version)) { + continue; + } + if (artifactId != null && !bom.getArtifactId().equals(artifactId)) { + continue; + } + if (groupId != null && !bom.getGroupId().equals(groupId)) { + continue; + } + if (matchedBom != null) { + if (matchedBoms == null) { + matchedBoms = new ArrayList<>(); + matchedBoms.add(matchedBom); + } + matchedBoms.add(bom); + } else { + matchedBom = bom; + } + } + if (matchedBoms != null) { + StringWriter buf = new StringWriter(); + buf.append("Multiple platforms were matching the requested platform BOM coordinates "); + buf.append(groupId == null ? "*" : groupId).append(':'); + buf.append(artifactId == null ? "*" : artifactId).append(':'); + buf.append(version == null ? 
"*" : version).append(": "); + try (BufferedWriter writer = new BufferedWriter(buf)) { + for (ArtifactCoords bom : matchedBoms) { + writer.newLine(); + writer.append("- ").append(bom.toString()); + } + } catch (IOException e) { + // + } + throw new MojoExecutionException(buf.toString()); + } + return catalogResolver.resolveExtensionCatalog(Arrays.asList(matchedBom)); + } catch (RegistryResolutionException e) { + throw new MojoExecutionException("Failed to resolve the extensions catalog", e); } } @@ -296,15 +391,15 @@ private void askTheUserForMissingValues() throws MojoExecutionException { // If the user has disabled the interactive mode or if the user has specified the artifactId, disable the // user interactions. if (!session.getRequest().isInteractiveMode() || shouldUseDefaults()) { - // Inject default values in all non-set parameters + if (StringUtils.isBlank(projectArtifactId)) { + // we need to set it for the project directory + projectArtifactId = DEFAULT_ARTIFACT_ID; + } if (StringUtils.isBlank(projectGroupId)) { projectGroupId = DEFAULT_GROUP_ID; } - if (StringUtils.isBlank(projectArtifactId)) { - projectArtifactId = "my-quarkus-project"; - } if (StringUtils.isBlank(projectVersion)) { - projectVersion = "1.0-SNAPSHOT"; + projectVersion = DEFAULT_VERSION; } return; } @@ -317,30 +412,27 @@ private void askTheUserForMissingValues() throws MojoExecutionException { if (StringUtils.isBlank(projectArtifactId)) { projectArtifactId = prompter.promptWithDefaultValue("Set the project artifactId", - "my-quarkus-project"); + DEFAULT_ARTIFACT_ID); } if (StringUtils.isBlank(projectVersion)) { projectVersion = prompter.promptWithDefaultValue("Set the project version", - "1.0-SNAPSHOT"); + DEFAULT_VERSION); } - if (StringUtils.isBlank(className)) { - // Ask the user if he want to create a resource - String answer = prompter.promptWithDefaultValue("Do you want to create a REST resource? 
(y/n)", "no"); - if (isTrueOrYes(answer)) { - String defaultResourceName = projectGroupId.replace("-", ".") - .replace("_", ".") + ".HelloResource"; - className = prompter.promptWithDefaultValue("Set the resource classname", defaultResourceName); - if (StringUtils.isBlank(path)) { - path = prompter.promptWithDefaultValue("Set the resource path ", CreateUtils.getDerivedPath(className)); - } - } else { - className = null; - path = null; + if (examples.isEmpty()) { + if (extensions.isEmpty()) { + extensions = Arrays + .stream(prompter + .promptWithDefaultValue("What extensions do you wish to add (comma separated list)", + DEFAULT_EXTENSIONS) + .split(",")) + .map(String::trim).filter(StringUtils::isNotEmpty).collect(Collectors.toSet()); } + String answer = prompter.promptWithDefaultValue( + "Do you want example code to get started (yes), or just an empty project (no)", "yes"); + noExamples = answer.startsWith("n"); } - } catch (IOException e) { throw new MojoExecutionException("Unable to get user input", e); } @@ -352,22 +444,17 @@ private boolean shouldUseDefaults() { } - private boolean isTrueOrYes(String answer) { - if (answer == null) { - return false; - } - String content = answer.trim().toLowerCase(); - return "true".equalsIgnoreCase(content) || "yes".equalsIgnoreCase(content) || "y".equalsIgnoreCase(content); - } - private void sanitizeOptions(SourceType sourceType) { - // If className is null, we won't create the REST resource, if (className != null) { className = sourceType.stripExtensionFrom(className); - if (!className.contains(".")) { - // No package name, inject one - className = projectGroupId.replace("-", ".").replace("_", ".") + "." + className; + int idx = className.lastIndexOf('.'); + if (idx >= 0 && StringUtils.isBlank(packageName)) { + // if it's a full qualified class name, we use the package name part (only if the packageName wasn't already defined) + packageName = className.substring(0, idx); + + // And we strip it from the className + className = className.substring(idx + 1); } if (StringUtils.isBlank(path)) { @@ -376,6 +463,7 @@ private void sanitizeOptions(SourceType sourceType) { path = "/" + path; } } + // if package name is empty, the groupId will be used as part of the CreateProject logic } private void sanitizeExtensions() { @@ -399,5 +487,4 @@ private void printUserInstructions(File root) { getLog().info("========================================================================================"); getLog().info(""); } - } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CreateUtils.java b/devtools/maven/src/main/java/io/quarkus/maven/CreateUtils.java index f55f3639dced5..1b5b8896cc3d2 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/CreateUtils.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/CreateUtils.java @@ -11,25 +11,18 @@ import org.apache.commons.lang3.StringUtils; import org.apache.maven.model.Plugin; import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.logging.Log; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; -import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; -import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.bootstrap.util.ZipUtils; import io.quarkus.maven.utilities.MojoUtils; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.resolver.json.QuarkusJsonPlatformDescriptorResolver; -import io.quarkus.platform.tools.config.QuarkusPlatformConfig; -import 
io.quarkus.platform.tools.maven.MojoMessageWriter; public final class CreateUtils { public static final String DEFAULT_PLATFORM_BOM_GROUP_ID = "io.quarkus"; public static final String QUARKUS_CORE_BOM_ARTIFACT_ID = "quarkus-bom"; - public static final String DEFAULT_PLATFORM_BOM_ARTIFACT_ID = QUARKUS_CORE_BOM_ARTIFACT_ID; + public static final String DEFAULT_PLATFORM_BOM_ARTIFACT_ID = "quarkus-universe-bom"; public static final String GRADLE_WRAPPER_PATH = "gradle-wrapper"; public static final String[] GRADLE_WRAPPER_FILES = new String[] { @@ -43,59 +36,6 @@ private CreateUtils() { //Not to be constructed } - private static boolean isVersionRange(String versionStr) { - if (versionStr == null || versionStr.isEmpty()) { - return false; - } - char c = versionStr.charAt(0); - if (c == '[' || c == '(') { - return true; - } - c = versionStr.charAt(versionStr.length() - 1); - if (c == ']' || c == ')') { - return true; - } - return versionStr.indexOf(',') >= 0; - } - - static QuarkusPlatformDescriptor setGlobalPlatformDescriptor(final String bomGroupId, final String bomArtifactId, - final String bomVersion, - MavenArtifactResolver mvn, Log log) throws MojoExecutionException { - final QuarkusPlatformDescriptor platform = resolvePlatformDescriptor(bomGroupId, bomArtifactId, bomVersion, mvn, log); - QuarkusPlatformConfig.defaultConfigBuilder().setPlatformDescriptor(platform).build(); - return platform; - } - - static QuarkusPlatformDescriptor resolvePlatformDescriptor(final String bomGroupId, final String bomArtifactId, - final String bomVersion, MavenArtifactResolver mvn, Log log) throws MojoExecutionException { - final QuarkusJsonPlatformDescriptorResolver platformResolver = QuarkusJsonPlatformDescriptorResolver.newInstance() - .setMessageWriter(new MojoMessageWriter(log)) - .setArtifactResolver(new BootstrapAppModelResolver(mvn)); - - String groupId = StringUtils.defaultIfBlank(bomGroupId, null); - String artifactId = StringUtils.defaultIfBlank(bomArtifactId, null); - String version = StringUtils.defaultIfBlank(bomVersion, null); - - if (CreateUtils.QUARKUS_CORE_BOM_ARTIFACT_ID.equals(artifactId) - && version == null) { - version = resolvePluginInfo(CreateUtils.class).getVersion(); - } - - final QuarkusPlatformDescriptor platform; - if (version == null) { - if (artifactId == null && groupId == null) { - platform = platformResolver.resolve(); - } else { - platform = platformResolver.resolveLatestFromBom(groupId, artifactId, null); - } - } else if (isVersionRange(version)) { - platform = platformResolver.resolveLatestFromBom(groupId, artifactId, version); - } else { - platform = platformResolver.resolveFromBom(groupId, artifactId, version); - } - return platform; - } - public static String getDerivedPath(String className) { String[] resourceClassName = StringUtils.splitByCharacterTypeCamelCase( className.substring(className.lastIndexOf(".") + 1)); diff --git a/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java index ae198823337e5..0978339a70477 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java @@ -1,45 +1,47 @@ package io.quarkus.maven; -import java.io.ByteArrayOutputStream; -import java.io.DataOutputStream; +import static org.twdata.maven.mojoexecutor.MojoExecutor.artifactId; +import static org.twdata.maven.mojoexecutor.MojoExecutor.configuration; +import static org.twdata.maven.mojoexecutor.MojoExecutor.executeMojo; +import static 
org.twdata.maven.mojoexecutor.MojoExecutor.executionEnvironment; +import static org.twdata.maven.mojoexecutor.MojoExecutor.goal; +import static org.twdata.maven.mojoexecutor.MojoExecutor.groupId; +import static org.twdata.maven.mojoexecutor.MojoExecutor.plugin; +import static org.twdata.maven.mojoexecutor.MojoExecutor.version; + import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.ObjectOutputStream; -import java.net.InetAddress; -import java.net.Socket; -import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.Set; -import java.util.jar.Attributes; -import java.util.jar.Manifest; import java.util.stream.Collectors; import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; +import java.util.zip.ZipFile; import org.apache.maven.artifact.Artifact; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Dependency; import org.apache.maven.model.Plugin; +import org.apache.maven.model.PluginExecution; import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.BuildPluginManager; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugin.descriptor.MojoDescriptor; +import org.apache.maven.plugin.logging.Log; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; @@ -47,14 +49,13 @@ import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.apache.maven.shared.utils.cli.CommandLineUtils; +import org.apache.maven.toolchain.Toolchain; +import org.apache.maven.toolchain.ToolchainManager; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.eclipse.aether.RepositorySystem; import org.eclipse.aether.RepositorySystemSession; import org.eclipse.aether.artifact.DefaultArtifact; import org.eclipse.aether.collection.CollectRequest; -import org.eclipse.aether.collection.DependencyCollectionException; -import org.eclipse.aether.graph.DependencyNode; -import org.eclipse.aether.graph.DependencyVisitor; import org.eclipse.aether.repository.RemoteRepository; import org.eclipse.aether.repository.WorkspaceReader; import org.eclipse.aether.resolution.ArtifactRequest; @@ -64,17 +65,18 @@ import org.eclipse.aether.resolution.DependencyResolutionException; import org.eclipse.aether.resolution.DependencyResult; import org.eclipse.aether.util.artifact.JavaScopes; -import org.twdata.maven.mojoexecutor.MojoExecutor; +import io.quarkus.bootstrap.devmode.DependenciesFilter; import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.bootstrap.model.AppModel; import io.quarkus.bootstrap.resolver.maven.options.BootstrapMavenOptions; import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; -import io.quarkus.bootstrap.resolver.maven.workspace.LocalWorkspace; import io.quarkus.deployment.dev.DevModeContext; import io.quarkus.deployment.dev.DevModeMain; +import io.quarkus.deployment.dev.QuarkusDevModeLauncher; +import 
io.quarkus.maven.MavenDevModeLauncher.Builder; import io.quarkus.maven.components.MavenVersionEnforcer; import io.quarkus.maven.utilities.MojoUtils; -import io.quarkus.utilities.JavaBinFinder; /** * The dev mojo, that runs a quarkus app in a forked process. A background compilation process is launched and any changes are @@ -85,6 +87,10 @@ @Mojo(name = "dev", defaultPhase = LifecyclePhase.PREPARE_PACKAGE, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME) public class DevMojo extends AbstractMojo { + private static final String EXT_PROPERTIES_PATH = "META-INF/quarkus-extension.properties"; + + private static final String KOTLIN_MAVEN_PLUGIN_GA = "org.jetbrains.kotlin:kotlin-maven-plugin"; + /** * running any one of these phases means the compile phase will have been run, if these have * not been run we manually run compile @@ -110,11 +116,12 @@ public class DevMojo extends AbstractMojo { private static final String QUARKUS_PLUGIN_GROUPID = "io.quarkus"; private static final String QUARKUS_PLUGIN_ARTIFACTID = "quarkus-maven-plugin"; - private static final String QUARKUS_PREPARE_GOAL = "prepare"; + private static final String QUARKUS_GENERATE_CODE_GOAL = "generate-code"; private static final String ORG_APACHE_MAVEN_PLUGINS = "org.apache.maven.plugins"; private static final String MAVEN_COMPILER_PLUGIN = "maven-compiler-plugin"; private static final String MAVEN_RESOURCES_PLUGIN = "maven-resources-plugin"; + private static final String MAVEN_TOOLCHAINS_PLUGIN = "maven-toolchains-plugin"; private static final String ORG_JETBRAINS_KOTLIN = "org.jetbrains.kotlin"; private static final String KOTLIN_MAVEN_PLUGIN = "kotlin-maven-plugin"; @@ -180,6 +187,9 @@ public class DevMojo extends AbstractMojo { @Parameter(defaultValue = "${suspend}") private String suspend; + @Parameter(defaultValue = "${debugHost}") + private String debugHost; + @Parameter(defaultValue = "${project.build.directory}") private File buildDir; @@ -195,6 +205,12 @@ public class DevMojo extends AbstractMojo { @Parameter(defaultValue = "${quarkus.args}") private String argsString; + @Parameter + private Map environmentVariables = Collections.emptyMap(); + + @Parameter + private Map systemProperties = Collections.emptyMap(); + @Parameter(defaultValue = "${session}") private MavenSession session; @@ -263,11 +279,22 @@ public class DevMojo extends AbstractMojo { private Boolean debugPortOk; + @Component + private ToolchainManager toolchainManager; + + @Override + public void setLog(Log log) { + super.setLog(log); + MojoLogger.delegate = log; + } + @Override public void execute() throws MojoFailureException, MojoExecutionException { mavenVersionEnforcer.ensureMavenVersion(getLog(), session); + initToolchain(); + //we always want to compile if needed, so if it is run from the parent it will compile dependent projects handleAutoCompile(); @@ -282,52 +309,9 @@ public void execute() throws MojoFailureException, MojoExecutionException { } } - // don't warn if running in a module of a multi-module project - if (!sourceDir.isDirectory() && (project.getParentFile() == null || !project.getParentFile().exists())) { - getLog().warn("The project's sources directory does not exist " + sourceDir); - } - try { - List args = new ArrayList<>(); - String javaTool = JavaBinFinder.findBin(); - getLog().debug("Using javaTool: " + javaTool); - args.add(javaTool); - if (this.suspend != null) { - switch (this.suspend.toLowerCase(Locale.ENGLISH)) { - case "n": - case "false": { - suspend = "n"; - break; - } - case "y": - case "true": { - 
suspend = "y"; - break; - } - default: { - getLog().warn( - "Ignoring invalid value \"" + suspend + "\" for \"suspend\" param and defaulting to \"n\""); - suspend = "n"; - break; - } - } - } else { - suspend = "n"; - } - if (jvmArgs != null) { - args.addAll(Arrays.asList(CommandLineUtils.translateCommandline(jvmArgs))); - } - - // the following flags reduce startup time and are acceptable only for dev purposes - args.add("-XX:TieredStopAtLevel=1"); - if (!preventnoverify) { - args.add("-Xverify:none"); - } - - DevModeRunner runner = new DevModeRunner(args); - - runner.prepare(false); + DevModeRunner runner = new DevModeRunner(); Map pomFiles = readPomFileTimestamps(runner); runner.run(); long nextCheck = System.currentTimeMillis() + 100; @@ -338,8 +322,8 @@ public void execute() throws MojoFailureException, MojoExecutionException { Thread.sleep(sleep); if (System.currentTimeMillis() > nextCheck) { nextCheck = System.currentTimeMillis() + 100; - if (!runner.process.isAlive()) { - if (runner.process.exitValue() != 0) { + if (!runner.alive()) { + if (runner.exitValue() != 0) { throw new MojoExecutionException("Dev mode process did not complete successfully"); } return; @@ -354,9 +338,10 @@ public void execute() throws MojoFailureException, MojoExecutionException { } if (!changed.isEmpty()) { getLog().info("Changes detected to " + changed + ", restarting dev mode"); - DevModeRunner newRunner = new DevModeRunner(args); + final DevModeRunner newRunner; try { - newRunner.prepare(true); + triggerCompile(); + newRunner = new DevModeRunner(); } catch (Exception e) { getLog().info("Could not load changed pom.xml file, changes not applied", e); continue; @@ -401,34 +386,22 @@ private void handleAutoCompile() throws MojoExecutionException { } } + private void initToolchain() throws MojoExecutionException { + executeIfConfigured(ORG_APACHE_MAVEN_PLUGINS, MAVEN_TOOLCHAINS_PLUGIN, "toolchain"); + } + private void triggerPrepare() throws MojoExecutionException { - Plugin quarkusPlugin = project.getPlugin(QUARKUS_PLUGIN_GROUPID + ":" + QUARKUS_PLUGIN_ARTIFACTID); - MojoExecutor.executeMojo( - quarkusPlugin, - MojoExecutor.goal(QUARKUS_PREPARE_GOAL), - MojoExecutor.configuration(), - MojoExecutor.executionEnvironment( - project, - session, - pluginManager)); + executeIfConfigured(QUARKUS_PLUGIN_GROUPID, QUARKUS_PLUGIN_ARTIFACTID, QUARKUS_GENERATE_CODE_GOAL); } private void triggerCompile() throws MojoExecutionException { handleResources(); // compile the Kotlin sources if needed - final String kotlinMavenPluginKey = ORG_JETBRAINS_KOTLIN + ":" + KOTLIN_MAVEN_PLUGIN; - final Plugin kotlinMavenPlugin = project.getPlugin(kotlinMavenPluginKey); - if (kotlinMavenPlugin != null) { - executeCompileGoal(kotlinMavenPlugin, ORG_JETBRAINS_KOTLIN, KOTLIN_MAVEN_PLUGIN); - } + executeIfConfigured(ORG_JETBRAINS_KOTLIN, KOTLIN_MAVEN_PLUGIN, "compile"); // Compile the Java sources if needed - final String compilerPluginKey = ORG_APACHE_MAVEN_PLUGINS + ":" + MAVEN_COMPILER_PLUGIN; - final Plugin compilerPlugin = project.getPlugin(compilerPluginKey); - if (compilerPlugin != null) { - executeCompileGoal(compilerPlugin, ORG_APACHE_MAVEN_PLUGINS, MAVEN_COMPILER_PLUGIN); - } + executeIfConfigured(ORG_APACHE_MAVEN_PLUGINS, MAVEN_COMPILER_PLUGIN, "compile"); } /** @@ -439,56 +412,76 @@ private void handleResources() throws MojoExecutionException { if (resources.isEmpty()) { return; } - Plugin resourcesPlugin = project.getPlugin(ORG_APACHE_MAVEN_PLUGINS + ":" + MAVEN_RESOURCES_PLUGIN); - if (resourcesPlugin == null) { - return; - } - 
MojoExecutor.executeMojo( - MojoExecutor.plugin( - MojoExecutor.groupId(ORG_APACHE_MAVEN_PLUGINS), - MojoExecutor.artifactId(MAVEN_RESOURCES_PLUGIN), - MojoExecutor.version(resourcesPlugin.getVersion()), - resourcesPlugin.getDependencies()), - MojoExecutor.goal("resources"), - getPluginConfig(resourcesPlugin), - MojoExecutor.executionEnvironment( - project, - session, - pluginManager)); + executeIfConfigured(ORG_APACHE_MAVEN_PLUGINS, MAVEN_RESOURCES_PLUGIN, "resources"); } - private void executeCompileGoal(Plugin plugin, String groupId, String artifactId) throws MojoExecutionException { - MojoExecutor.executeMojo( - MojoExecutor.plugin( - MojoExecutor.groupId(groupId), - MojoExecutor.artifactId(artifactId), - MojoExecutor.version(plugin.getVersion()), + private void executeIfConfigured(String pluginGroupId, String pluginArtifactId, String goal) throws MojoExecutionException { + final Plugin plugin = project.getPlugin(pluginGroupId + ":" + pluginArtifactId); + if (plugin == null || plugin.getExecutions().stream().noneMatch(exec -> exec.getGoals().contains(goal))) { + return; + } + executeMojo( + plugin( + groupId(pluginGroupId), + artifactId(pluginArtifactId), + version(plugin.getVersion()), plugin.getDependencies()), - MojoExecutor.goal("compile"), - getPluginConfig(plugin), - MojoExecutor.executionEnvironment( + goal(goal), + getPluginConfig(plugin, goal), + executionEnvironment( project, session, pluginManager)); } - private Xpp3Dom getPluginConfig(Plugin plugin) { - Xpp3Dom configuration = MojoExecutor.configuration(); - Xpp3Dom pluginConfiguration = (Xpp3Dom) plugin.getConfiguration(); - if (pluginConfiguration != null) { - //Filter out `test*` configurations - for (Xpp3Dom child : pluginConfiguration.getChildren()) { - if (!child.getName().startsWith("test")) { + private Xpp3Dom getPluginConfig(Plugin plugin, String goal) throws MojoExecutionException { + Xpp3Dom mergedConfig = null; + if (!plugin.getExecutions().isEmpty()) { + for (PluginExecution exec : plugin.getExecutions()) { + if (exec.getConfiguration() != null && exec.getGoals().contains(goal)) { + mergedConfig = mergedConfig == null ? (Xpp3Dom) exec.getConfiguration() + : Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) exec.getConfiguration(), true); + } + } + } + + if ((Xpp3Dom) plugin.getConfiguration() != null) { + mergedConfig = mergedConfig == null ? 
(Xpp3Dom) plugin.getConfiguration() + : Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) plugin.getConfiguration(), true); + } + + final Xpp3Dom configuration = configuration(); + if (mergedConfig != null) { + Set supportedParams = null; + // Filter out `test*` configurations + for (Xpp3Dom child : mergedConfig.getChildren()) { + if (child.getName().startsWith("test")) { + continue; + } + if (supportedParams == null) { + supportedParams = getMojoDescriptor(plugin, goal).getParameterMap().keySet(); + } + if (supportedParams.contains(child.getName())) { configuration.addChild(child); } } } + return configuration; } + private MojoDescriptor getMojoDescriptor(Plugin plugin, String goal) throws MojoExecutionException { + try { + return pluginManager.getMojoDescriptor(plugin, goal, repos, repoSession); + } catch (Exception e) { + throw new MojoExecutionException( + "Failed to obtain descriptor for Maven plugin " + plugin.getId() + " goal " + goal, e); + } + } + private Map readPomFileTimestamps(DevModeRunner runner) throws IOException { Map ret = new HashMap<>(); - for (Path i : runner.getPomFiles()) { + for (Path i : runner.pomFiles()) { ret.put(i, Files.getLastModifiedTime(i).toMillis()); } return ret; @@ -502,7 +495,7 @@ private String getSourceEncoding() { return null; } - private void addProject(DevModeContext devModeContext, LocalProject localProject, boolean root) { + private void addProject(MavenDevModeLauncher.Builder builder, LocalProject localProject, boolean root) throws Exception { String projectDirectory = null; Set sourcePaths = null; @@ -539,6 +532,11 @@ private void addProject(DevModeContext devModeContext, LocalProject localProject resourcePath = resourcesSourcesDir.toAbsolutePath().toString(); } + if (classesPath == null && (!sourcePaths.isEmpty() || resourcePath != null)) { + throw new MojoExecutionException("Hot reloadable dependency " + localProject.getAppArtifact() + + " has not been compiled yet (the classes directory " + classesDir + " does not exist)"); + } + Path targetDir = Paths.get(project.getBuild().getDirectory()); DevModeContext.ModuleInfo moduleInfo = new DevModeContext.ModuleInfo(localProject.getKey(), @@ -551,437 +549,336 @@ private void addProject(DevModeContext devModeContext, LocalProject localProject targetDir.resolve("generated-sources").toAbsolutePath().toString(), targetDir.toAbsolutePath().toString()); if (root) { - devModeContext.setApplicationRoot(moduleInfo); + builder.mainModule(moduleInfo); } else { - devModeContext.getAdditionalModules().add(moduleInfo); + builder.dependency(moduleInfo); } } - private void addToClassPaths(StringBuilder classPathManifest, File file) { - final URI uri = file.toPath().toAbsolutePath().toUri(); - classPathManifest.append(uri).append(" "); - } - - class DevModeRunner { + private class DevModeRunner { - private static final String KOTLIN_MAVEN_PLUGIN_GA = "org.jetbrains.kotlin:kotlin-maven-plugin"; - private final List args; + final QuarkusDevModeLauncher launcher; private Process process; - private Set pomFiles = new HashSet<>(); - DevModeRunner(List args) { - this.args = new ArrayList<>(args); + private DevModeRunner() throws Exception { + launcher = newLauncher(); } - /** - * Attempts to prepare the dev mode runner. 
- */ - void prepare(final boolean triggerCompile) throws Exception { - if (triggerCompile) { - triggerCompile(); - } - if (debug == null) { - // debug mode not specified - // make sure 5005 is not used, we don't want to just fail if something else is using it - // we don't check this on restarts, as the previous process is still running - if (debugPortOk == null) { - try (Socket socket = new Socket(InetAddress.getByAddress(new byte[] { 127, 0, 0, 1 }), 5005)) { - getLog().error("Port 5005 in use, not starting in debug mode"); - debugPortOk = false; - } catch (IOException e) { - debugPortOk = true; - } - } - if (debugPortOk) { - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=0.0.0.0:5005,server=y,suspend=" + suspend); - } - } else if (debug.toLowerCase().equals("client")) { - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=localhost:5005,server=n,suspend=" + suspend); - } else if (debug.toLowerCase().equals("true")) { - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=0.0.0.0:5005,server=y,suspend=" + suspend); - } else if (!debug.toLowerCase().equals("false")) { - try { - int port = Integer.parseInt(debug); - if (port <= 0) { - throw new MojoFailureException("The specified debug port must be greater than 0"); - } - args.add("-Xdebug"); - args.add("-Xrunjdwp:transport=dt_socket,address=0.0.0.0:" + port + ",server=y,suspend=" + suspend); - } catch (NumberFormatException e) { - throw new MojoFailureException( - "Invalid value for debug parameter: " + debug + " must be true|false|client|{port}"); - } - } - //build a class-path string for the base platform - //this stuff does not change - // Do not include URIs in the manifest, because some JVMs do not like that - StringBuilder classPathManifest = new StringBuilder(); - final DevModeContext devModeContext = new DevModeContext(); - for (Map.Entry e : System.getProperties().entrySet()) { - devModeContext.getSystemProperties().put(e.getKey().toString(), (String) e.getValue()); - } - devModeContext.setProjectDir(project.getFile().getParentFile()); - devModeContext.getBuildSystemProperties().putAll((Map) project.getProperties()); - - // this is a minor hack to allow ApplicationConfig to be populated with defaults - devModeContext.getBuildSystemProperties().putIfAbsent("quarkus.application.name", project.getArtifactId()); - devModeContext.getBuildSystemProperties().putIfAbsent("quarkus.application.version", project.getVersion()); - - devModeContext.setSourceEncoding(getSourceEncoding()); - - // Set compilation flags. Try the explicitly given configuration first. Otherwise, - // refer to the configuration of the Maven Compiler Plugin. 
- final Optional compilerPluginConfiguration = findCompilerPluginConfiguration(); - if (compilerArgs != null) { - devModeContext.setCompilerOptions(compilerArgs); - } else if (compilerPluginConfiguration.isPresent()) { - final Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.get().getChild("compilerArgs"); - if (compilerPluginArgsConfiguration != null) { - List compilerPluginArgs = new ArrayList<>(); - for (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) { - compilerPluginArgs.add(argConfiguration.getValue()); - } - // compilerArgs can also take a value without using arg - if (compilerPluginArgsConfiguration.getValue() != null - && !compilerPluginArgsConfiguration.getValue().isEmpty()) { - compilerPluginArgs.add(compilerPluginArgsConfiguration.getValue().trim()); - } - devModeContext.setCompilerOptions(compilerPluginArgs); - } - } - if (source != null) { - devModeContext.setSourceJavaVersion(source); - } else if (compilerPluginConfiguration.isPresent()) { - final Xpp3Dom javacSourceVersion = compilerPluginConfiguration.get().getChild("source"); - if (javacSourceVersion != null && javacSourceVersion.getValue() != null - && !javacSourceVersion.getValue().trim().isEmpty()) { - devModeContext.setSourceJavaVersion(javacSourceVersion.getValue().trim()); - } - } - if (target != null) { - devModeContext.setTargetJvmVersion(target); - } else if (compilerPluginConfiguration.isPresent()) { - final Xpp3Dom javacTargetVersion = compilerPluginConfiguration.get().getChild("target"); - if (javacTargetVersion != null && javacTargetVersion.getValue() != null - && !javacTargetVersion.getValue().trim().isEmpty()) { - devModeContext.setTargetJvmVersion(javacTargetVersion.getValue().trim()); - } - } - - setKotlinSpecificFlags(devModeContext); - if (noDeps) { - final LocalProject localProject = LocalProject.load(project.getModel().getPomFile().toPath()); - addProject(devModeContext, localProject, true); - pomFiles.add(localProject.getRawModel().getPomFile().toPath()); - devModeContext.getLocalArtifacts() - .add(new AppArtifactKey(localProject.getGroupId(), localProject.getArtifactId(), null, "jar")); - } else { - final LocalProject localProject = LocalProject.loadWorkspace(project.getModel().getPomFile().toPath()); - for (LocalProject project : filterExtensionDependencies(localProject)) { - addProject(devModeContext, project, project == localProject); - pomFiles.add(project.getRawModel().getPomFile().toPath()); - devModeContext.getLocalArtifacts() - .add(new AppArtifactKey(project.getGroupId(), project.getArtifactId(), null, "jar")); - } - } - - addQuarkusDevModeDeps(classPathManifest); - - args.add("-Djava.util.logging.manager=org.jboss.logmanager.LogManager"); + Collection pomFiles() { + return launcher.watchedBuildFiles(); + } - //in most cases these are not used, however they need to be present for some - //parent-first cases such as logging - for (Artifact appDep : project.getArtifacts()) { - // only add the artifact if it's present in the dev mode context - // we need this to avoid having jars on the classpath multiple times - if (!devModeContext.getLocalArtifacts().contains(new AppArtifactKey(appDep.getGroupId(), appDep.getArtifactId(), - appDep.getClassifier(), appDep.getArtifactHandler().getExtension()))) { - addToClassPaths(classPathManifest, appDep.getFile()); - } - } + boolean alive() { + return process == null ? false : process.isAlive(); + } - //now we need to build a temporary jar to actually run + int exitValue() { + return process == null ? 
-1 : process.exitValue(); + } - File tempFile = new File(buildDir, project.getArtifactId() + "-dev.jar"); - tempFile.delete(); - // Only delete the -dev.jar on exit if requested - if (deleteDevJar) { - tempFile.deleteOnExit(); + void run() throws Exception { + // Display the launch command line in dev mode + if (getLog().isDebugEnabled()) { + getLog().debug("Launching JVM with command line: " + String.join(" ", launcher.args())); } - getLog().debug("Executable jar: " + tempFile.getAbsolutePath()); - - devModeContext.setBaseName(project.getBuild().getFinalName()); - devModeContext.setCacheDir(new File(buildDir, "transformer-cache").getAbsoluteFile()); - - // this is the jar file we will use to launch the dev mode main class - devModeContext.setDevModeRunnerJarFile(tempFile); - - modifyDevModeContext(devModeContext); - - try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(tempFile))) { - out.putNextEntry(new ZipEntry("META-INF/")); - Manifest manifest = new Manifest(); - manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); - manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classPathManifest.toString()); - manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, DevModeMain.class.getName()); - out.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF")); - manifest.write(out); - - out.putNextEntry(new ZipEntry(DevModeMain.DEV_MODE_CONTEXT)); - ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - ObjectOutputStream obj = new ObjectOutputStream(new DataOutputStream(bytes)); - obj.writeObject(devModeContext); - obj.close(); - out.write(bytes.toByteArray()); + final ProcessBuilder processBuilder = new ProcessBuilder(launcher.args()) + .redirectErrorStream(true) + .inheritIO() + .directory(workingDir == null ? 
buildDir : workingDir); + if (!environmentVariables.isEmpty()) { + processBuilder.environment().putAll(environmentVariables); } + process = processBuilder.start(); - outputDirectory.mkdirs(); - // if the --enable-preview flag was set, then we need to enable it when launching dev mode as well - if (devModeContext.isEnablePreview()) { - args.add(DevModeContext.ENABLE_PREVIEW_FLAG); - } + //https://github.com/quarkusio/quarkus/issues/232 + Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { + @Override + public void run() { + process.destroy(); + try { + process.waitFor(); + } catch (InterruptedException ignored) { + getLog().warn("Unable to properly wait for dev-mode end", ignored); + } + } + }, "Development Mode Shutdown Hook")); + } - propagateUserProperties(); + void stop() throws InterruptedException { + process.destroy(); + process.waitFor(); + } + } - args.add("-jar"); - args.add(tempFile.getAbsolutePath()); - if (argsString != null) { - args.addAll(Arrays.asList(CommandLineUtils.translateCommandline(argsString))); + private QuarkusDevModeLauncher newLauncher() throws Exception { + String java = null; + // See if a toolchain is configured + if (toolchainManager != null) { + Toolchain toolchain = toolchainManager.getToolchainFromBuildContext("jdk", session); + if (toolchain != null) { + java = toolchain.findTool("java"); + getLog().info("JVM from toolchain: " + java); } } - private void propagateUserProperties() { - final String mavenCmdLine = BootstrapMavenOptions.getMavenCmdLine(); - if (mavenCmdLine == null || mavenCmdLine.isEmpty()) { - return; - } - int i = mavenCmdLine.indexOf("-D"); - if (i < 0) { - return; - } - final StringBuilder buf = new StringBuilder(); - buf.append("-D"); - i += 2; - while (i < mavenCmdLine.length()) { - final char ch = mavenCmdLine.charAt(i++); - if (!Character.isWhitespace(ch)) { - buf.append(ch); - } else if (buf.length() > 2) { - args.add(buf.toString()); - buf.setLength(2); - i = mavenCmdLine.indexOf("-D", i); - if (i < 0) { - break; - } - i += 2; + final MavenDevModeLauncher.Builder builder = MavenDevModeLauncher.builder(java, getLog()) + .preventnoverify(preventnoverify) + .buildDir(buildDir) + .outputDir(outputDirectory) + .suspend(suspend) + .debug(debug) + .debugHost(debugHost) + .debugPortOk(debugPortOk) + .deleteDevJar(deleteDevJar); + + setJvmArgs(builder); + + builder.projectDir(project.getFile().getParentFile()); + builder.buildSystemProperties((Map) project.getProperties()); + + builder.applicationName(project.getArtifactId()); + builder.applicationVersion(project.getVersion()); + + builder.sourceEncoding(getSourceEncoding()); + + // Set compilation flags. Try the explicitly given configuration first. Otherwise, + // refer to the configuration of the Maven Compiler Plugin. 
+ final Optional compilerPluginConfiguration = findCompilerPluginConfiguration(); + if (compilerArgs != null) { + builder.compilerOptions(compilerArgs); + } else if (compilerPluginConfiguration.isPresent()) { + final Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.get().getChild("compilerArgs"); + if (compilerPluginArgsConfiguration != null) { + List compilerPluginArgs = new ArrayList<>(); + for (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) { + compilerPluginArgs.add(argConfiguration.getValue()); } - } - if (buf.length() > 2) { - args.add(buf.toString()); + // compilerArgs can also take a value without using arg + if (compilerPluginArgsConfiguration.getValue() != null + && !compilerPluginArgsConfiguration.getValue().isEmpty()) { + compilerPluginArgs.add(compilerPluginArgsConfiguration.getValue().trim()); + } + builder.compilerOptions(compilerPluginArgs); } } - - private void addQuarkusDevModeDeps(StringBuilder classPathManifest) - throws MojoExecutionException, DependencyResolutionException { - final String pomPropsPath = "META-INF/maven/io.quarkus/quarkus-core-deployment/pom.properties"; - final InputStream devModePomPropsIs = DevModeMain.class.getClassLoader().getResourceAsStream(pomPropsPath); - if (devModePomPropsIs == null) { - throw new MojoExecutionException("Failed to locate " + pomPropsPath + " on the classpath"); - } - final Properties devModeProps = new Properties(); - try (InputStream is = devModePomPropsIs) { - devModeProps.load(is); - } catch (IOException e) { - throw new MojoExecutionException("Failed to load " + pomPropsPath + " from the classpath", e); - } - final String devModeGroupId = devModeProps.getProperty("groupId"); - if (devModeGroupId == null) { - throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing groupId"); - } - final String devModeArtifactId = devModeProps.getProperty("artifactId"); - if (devModeArtifactId == null) { - throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing artifactId"); - } - final String devModeVersion = devModeProps.getProperty("version"); - if (devModeVersion == null) { - throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing version"); - } - - final DefaultArtifact devModeJar = new DefaultArtifact(devModeGroupId, devModeArtifactId, "jar", devModeVersion); - final DependencyResult cpRes = repoSystem.resolveDependencies(repoSession, - new DependencyRequest() - .setCollectRequest( - new CollectRequest() - .setRoot(new org.eclipse.aether.graph.Dependency(devModeJar, JavaScopes.RUNTIME)) - .setRepositories(repos))); - - for (ArtifactResult appDep : cpRes.getArtifactResults()) { - //we only use the launcher for launching from the IDE, we need to exclude it - if (!(appDep.getArtifact().getGroupId().equals("io.quarkus") - && appDep.getArtifact().getArtifactId().equals("quarkus-ide-launcher"))) { - addToClassPaths(classPathManifest, appDep.getArtifact().getFile()); - } + if (source != null) { + builder.sourceJavaVersion(source); + } else if (compilerPluginConfiguration.isPresent()) { + final Xpp3Dom javacSourceVersion = compilerPluginConfiguration.get().getChild("source"); + if (javacSourceVersion != null && javacSourceVersion.getValue() != null + && !javacSourceVersion.getValue().trim().isEmpty()) { + builder.sourceJavaVersion(javacSourceVersion.getValue().trim()); } } - - private void setKotlinSpecificFlags(DevModeContext devModeContext) { - Plugin kotlinMavenPlugin = null; - for (Plugin plugin : 
project.getBuildPlugins()) { - if (plugin.getKey().equals(KOTLIN_MAVEN_PLUGIN_GA)) { - kotlinMavenPlugin = plugin; - break; - } + if (target != null) { + builder.targetJavaVersion(target); + } else if (compilerPluginConfiguration.isPresent()) { + final Xpp3Dom javacTargetVersion = compilerPluginConfiguration.get().getChild("target"); + if (javacTargetVersion != null && javacTargetVersion.getValue() != null + && !javacTargetVersion.getValue().trim().isEmpty()) { + builder.targetJavaVersion(javacTargetVersion.getValue().trim()); } + } - if (kotlinMavenPlugin == null) { - return; + setKotlinSpecificFlags(builder); + if (noDeps) { + final LocalProject localProject = LocalProject.load(project.getModel().getPomFile().toPath()); + addProject(builder, localProject, true); + builder.watchedBuildFile(localProject.getRawModel().getPomFile().toPath()); + builder.localArtifact(new AppArtifactKey(localProject.getGroupId(), localProject.getArtifactId(), null, "jar")); + } else { + final LocalProject localProject = LocalProject.loadWorkspace(project.getModel().getPomFile().toPath()); + for (LocalProject project : DependenciesFilter.filterNotReloadableDependencies(localProject, + this.project.getArtifacts(), repoSystem, repoSession, repos)) { + addProject(builder, project, project == localProject); + builder.watchedBuildFile(project.getRawModel().getPomFile().toPath()); + builder.localArtifact(new AppArtifactKey(project.getGroupId(), project.getArtifactId(), null, "jar")); } + } - getLog().debug("Kotlin Maven plugin detected"); - - List compilerPluginArtifacts = new ArrayList<>(); - List dependencies = kotlinMavenPlugin.getDependencies(); - for (Dependency dependency : dependencies) { - try { - ArtifactResult resolvedArtifact = repoSystem.resolveArtifact(repoSession, - new ArtifactRequest() - .setArtifact(new DefaultArtifact(dependency.getGroupId(), dependency.getArtifactId(), - dependency.getClassifier(), dependency.getType(), dependency.getVersion())) - .setRepositories(repos)); - - compilerPluginArtifacts.add(resolvedArtifact.getArtifact().getFile().toPath().toAbsolutePath().toString()); - } catch (ArtifactResolutionException e) { - getLog().warn("Unable to properly setup dev-mode for Kotlin", e); - return; - } - } - devModeContext.setCompilerPluginArtifacts(compilerPluginArtifacts); + addQuarkusDevModeDeps(builder); + + //in most cases these are not used, however they need to be present for some + //parent-first cases such as logging + //first we go through and get all the parent first artifacts + Set parentFirstArtifacts = new HashSet<>(); + for (Artifact appDep : project.getArtifacts()) { + if (appDep.getArtifactHandler().getExtension().equals("jar") && appDep.getFile().isFile()) { + try (ZipFile file = new ZipFile(appDep.getFile())) { + ZipEntry entry = file.getEntry(EXT_PROPERTIES_PATH); + if (entry != null) { + Properties p = new Properties(); + try (InputStream inputStream = file.getInputStream(entry)) { + p.load(inputStream); + String parentFirst = p.getProperty(AppModel.PARENT_FIRST_ARTIFACTS); + if (parentFirst != null) { + String[] artifacts = parentFirst.split(","); + for (String artifact : artifacts) { + parentFirstArtifacts.add(new AppArtifactKey(artifact.split(":"))); + } + } + } - List options = new ArrayList<>(); - Xpp3Dom compilerPluginConfiguration = (Xpp3Dom) kotlinMavenPlugin.getConfiguration(); - if (compilerPluginConfiguration != null) { - Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.getChild("pluginOptions"); - if (compilerPluginArgsConfiguration != null) 
{ - for (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) { - options.add(argConfiguration.getValue()); } } } - devModeContext.setCompilerPluginsOptions(options); } - - public Set getPomFiles() { - return pomFiles; - } - - public void run() throws Exception { - // Display the launch command line in dev mode - if (getLog().isDebugEnabled()) { - getLog().debug("Launching JVM with command line: " + String.join(" ", args)); + for (Artifact appDep : project.getArtifacts()) { + // only add the artifact if it's present in the dev mode context + // we need this to avoid having jars on the classpath multiple times + AppArtifactKey key = new AppArtifactKey(appDep.getGroupId(), appDep.getArtifactId(), + appDep.getClassifier(), appDep.getArtifactHandler().getExtension()); + if (!builder.isLocal(key) && parentFirstArtifacts.contains(key)) { + builder.classpathEntry(appDep.getFile()); } - process = new ProcessBuilder(args) - .inheritIO() - .directory(workingDir) - .start(); - //https://github.com/quarkusio/quarkus/issues/232 - Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { - @Override - public void run() { - process.destroy(); - } - }, "Development Mode Shutdown Hook")); } - public void stop() throws InterruptedException { - process.destroy(); - process.waitFor(); + builder.baseName(project.getBuild().getFinalName()); + + modifyDevModeContext(builder); + + if (argsString != null) { + builder.applicationArgs(argsString); } + propagateUserProperties(builder); + return builder.build(); } - protected void modifyDevModeContext(DevModeContext devModeContext) { + private void setJvmArgs(Builder builder) throws Exception { + String jvmArgs = this.jvmArgs; + if (!systemProperties.isEmpty()) { + final StringBuilder buf = new StringBuilder(); + if (jvmArgs != null) { + buf.append(jvmArgs); + } + for (Map.Entry prop : systemProperties.entrySet()) { + buf.append(" -D").append(prop.getKey()).append("=\"").append(prop.getValue()).append("\""); + } + jvmArgs = buf.toString(); + } + if (jvmArgs != null) { + builder.jvmArgs(Arrays.asList(CommandLineUtils.translateCommandline(jvmArgs))); + } } - private List filterExtensionDependencies(LocalProject localProject) { - final LocalWorkspace workspace = localProject.getWorkspace(); - if (workspace == null) { - return Collections.singletonList(localProject); + private void propagateUserProperties(MavenDevModeLauncher.Builder builder) { + Properties userProps = BootstrapMavenOptions.newInstance().getSystemProperties(); + if (userProps == null) { + return; + } + final StringBuilder buf = new StringBuilder(); + buf.append("-D"); + for (Object o : userProps.keySet()) { + String name = o.toString(); + final String value = userProps.getProperty(name); + buf.setLength(2); + buf.append(name); + if (value != null && !value.isEmpty()) { + buf.append('='); + buf.append(value); + } + builder.jvmArgs(buf.toString()); } + } - List ret = new ArrayList<>(); - Set extensionsAndDeps = new HashSet<>(); + private void addQuarkusDevModeDeps(MavenDevModeLauncher.Builder builder) + throws MojoExecutionException, DependencyResolutionException { + final String pomPropsPath = "META-INF/maven/io.quarkus/quarkus-core-deployment/pom.properties"; + final InputStream devModePomPropsIs = DevModeMain.class.getClassLoader().getResourceAsStream(pomPropsPath); + if (devModePomPropsIs == null) { + throw new MojoExecutionException("Failed to locate " + pomPropsPath + " on the classpath"); + } + final Properties devModeProps = new Properties(); + try (InputStream is = 
devModePomPropsIs) { + devModeProps.load(is); + } catch (IOException e) { + throw new MojoExecutionException("Failed to load " + pomPropsPath + " from the classpath", e); + } + final String devModeGroupId = devModeProps.getProperty("groupId"); + if (devModeGroupId == null) { + throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing groupId"); + } + final String devModeArtifactId = devModeProps.getProperty("artifactId"); + if (devModeArtifactId == null) { + throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing artifactId"); + } + final String devModeVersion = devModeProps.getProperty("version"); + if (devModeVersion == null) { + throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing version"); + } - ret.add(localProject); - for (Artifact a : project.getArtifacts()) { - final AppArtifactKey depKey = new AppArtifactKey(a.getGroupId(), a.getArtifactId()); - final LocalProject project = workspace.getProject(depKey); - if (project == null) { - continue; - } - if (!project.getVersion().equals(a.getVersion())) { - getLog().warn(depKey + " is excluded from live coding since the application depends on version " - + a.getVersion() + " while the version present in the workspace is " + project.getVersion()); - continue; + final DefaultArtifact devModeJar = new DefaultArtifact(devModeGroupId, devModeArtifactId, "jar", devModeVersion); + final DependencyResult cpRes = repoSystem.resolveDependencies(repoSession, + new DependencyRequest() + .setCollectRequest( + new CollectRequest() + .setRoot(new org.eclipse.aether.graph.Dependency(devModeJar, JavaScopes.RUNTIME)) + .setRepositories(repos))); + + for (ArtifactResult appDep : cpRes.getArtifactResults()) { + //we only use the launcher for launching from the IDE, we need to exclude it + if (!(appDep.getArtifact().getGroupId().equals("io.quarkus") + && appDep.getArtifact().getArtifactId().equals("quarkus-ide-launcher"))) { + if (appDep.getArtifact().getGroupId().equals("io.quarkus") + && appDep.getArtifact().getArtifactId().equals("quarkus-class-change-agent")) { + builder.jvmArgs("-javaagent:" + appDep.getArtifact().getFile().getAbsolutePath()); + } else { + builder.classpathEntry(appDep.getArtifact().getFile()); + } } - if (project.getClassesDir() != null && - //if this project also contains Quarkus extensions we do no want to include these in the discovery - //a bit of an edge case, but if you try and include a sample project with your extension you will - //run into problems without this - (Files.exists(project.getClassesDir().resolve("META-INF/quarkus-extension.properties")) || - Files.exists(project.getClassesDir().resolve("META-INF/quarkus-build-steps.list")))) { - // TODO add the deployment deps - extensionDepWarning(depKey); - try { - final DependencyNode depRoot = repoSystem.collectDependencies(repoSession, new CollectRequest() - .setRoot(new org.eclipse.aether.graph.Dependency( - new DefaultArtifact(a.getGroupId(), a.getArtifactId(), - a.getClassifier(), a.getArtifactHandler().getExtension(), a.getVersion()), - JavaScopes.RUNTIME)) - .setRepositories(repos)).getRoot(); - depRoot.accept(new DependencyVisitor() { - @Override - public boolean visitEnter(DependencyNode node) { - final org.eclipse.aether.artifact.Artifact artifact = node.getArtifact(); - if ("jar".equals(artifact.getExtension())) { - extensionsAndDeps.add(new AppArtifactKey(artifact.getGroupId(), artifact.getArtifactId())); - } - return true; - } + } + } - @Override - public boolean 
visitLeave(DependencyNode node) { - return true; - } - }); - } catch (DependencyCollectionException e) { - throw new RuntimeException("Failed to collect dependencies for " + a, e); - } - } else { - ret.add(project); + private void setKotlinSpecificFlags(MavenDevModeLauncher.Builder builder) { + Plugin kotlinMavenPlugin = null; + for (Plugin plugin : project.getBuildPlugins()) { + if (plugin.getKey().equals(KOTLIN_MAVEN_PLUGIN_GA)) { + kotlinMavenPlugin = plugin; + break; } } - if (extensionsAndDeps.isEmpty()) { - return ret; + if (kotlinMavenPlugin == null) { + return; } - Iterator iterator = ret.iterator(); - while (iterator.hasNext()) { - final LocalProject localDep = iterator.next(); - if (extensionsAndDeps.contains(localDep.getKey())) { - extensionDepWarning(localDep.getKey()); - iterator.remove(); + getLog().debug("Kotlin Maven plugin detected"); + + List compilerPluginArtifacts = new ArrayList<>(); + List dependencies = kotlinMavenPlugin.getDependencies(); + for (Dependency dependency : dependencies) { + try { + ArtifactResult resolvedArtifact = repoSystem.resolveArtifact(repoSession, + new ArtifactRequest() + .setArtifact(new DefaultArtifact(dependency.getGroupId(), dependency.getArtifactId(), + dependency.getClassifier(), dependency.getType(), dependency.getVersion())) + .setRepositories(repos)); + + compilerPluginArtifacts.add(resolvedArtifact.getArtifact().getFile().toPath().toAbsolutePath().toString()); + } catch (ArtifactResolutionException e) { + getLog().warn("Unable to properly setup dev-mode for Kotlin", e); + return; } } - return ret; + builder.compilerPluginArtifacts(compilerPluginArtifacts); + + List options = new ArrayList<>(); + Xpp3Dom compilerPluginConfiguration = (Xpp3Dom) kotlinMavenPlugin.getConfiguration(); + if (compilerPluginConfiguration != null) { + Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.getChild("pluginOptions"); + if (compilerPluginArgsConfiguration != null) { + for (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) { + options.add(argConfiguration.getValue()); + } + } + } + builder.compilerPluginOptions(options); } - private void extensionDepWarning(AppArtifactKey key) { - getLog().warn("Local Quarkus extension dependency " + key + " will not be hot-reloadable"); + protected void modifyDevModeContext(MavenDevModeLauncher.Builder builder) { + } private Optional findCompilerPluginConfiguration() { diff --git a/devtools/maven/src/main/java/io/quarkus/maven/GenerateCodeMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/GenerateCodeMojo.java new file mode 100644 index 0000000000000..cb7bd5159d0da --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/GenerateCodeMojo.java @@ -0,0 +1,84 @@ +package io.quarkus.maven; + +import java.lang.reflect.Method; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.Consumer; + +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.LifecyclePhase; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; +import org.apache.maven.plugins.annotations.ResolutionScope; + +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; +import io.quarkus.bootstrap.model.AppModel; + +@Mojo(name = "generate-code", defaultPhase = 
LifecyclePhase.GENERATE_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, threadSafe = true) +public class GenerateCodeMojo extends QuarkusBootstrapMojo { + + /** + * Skip the execution of this mojo + */ + @Parameter(defaultValue = "false", property = "quarkus.generate-code.skip", alias = "quarkus.prepare.skip") + private boolean skipSourceGeneration = false; + + @Override + protected boolean beforeExecute() throws MojoExecutionException, MojoFailureException { + if (mavenProject().getPackaging().equals("pom")) { + getLog().info("Type of the artifact is POM, skipping build goal"); + return false; + } + if (skipSourceGeneration) { + getLog().info("Skipping Quarkus code generation"); + return false; + } + return true; + } + + @Override + protected void doExecute() throws MojoExecutionException, MojoFailureException { + String projectDir = mavenProject().getBasedir().getAbsolutePath(); + Path sourcesDir = Paths.get(projectDir, "src", "main"); + generateCode(sourcesDir, path -> mavenProject().addCompileSourceRoot(path.toString()), false); + } + + void generateCode(Path sourcesDir, + Consumer sourceRegistrar, + boolean test) throws MojoFailureException, MojoExecutionException { + + ClassLoader originalTccl = Thread.currentThread().getContextClassLoader(); + try { + + final CuratedApplication curatedApplication = bootstrapApplication(); + + QuarkusClassLoader deploymentClassLoader = curatedApplication.createDeploymentClassLoader(); + Thread.currentThread().setContextClassLoader(deploymentClassLoader); + + final Class codeGenerator = deploymentClassLoader.loadClass("io.quarkus.deployment.CodeGenerator"); + final Method initAndRun = codeGenerator.getMethod("initAndRun", ClassLoader.class, Set.class, Path.class, + Path.class, + Consumer.class, AppModel.class, Map.class); + initAndRun.invoke(null, deploymentClassLoader, + Collections.singleton(sourcesDir), + generatedSourcesDir(test), + buildDir().toPath(), + sourceRegistrar, + curatedApplication.getAppModel(), + mavenProject().getProperties()); + } catch (Exception any) { + throw new MojoExecutionException("Quarkus code generation phase has failed", any); + } finally { + Thread.currentThread().setContextClassLoader(originalTccl); + } + } + + private Path generatedSourcesDir(boolean test) { + return test ? 
buildDir().toPath().resolve("generated-test-sources") : buildDir().toPath().resolve("generated-sources"); + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/GenerateCodeTestsMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/GenerateCodeTestsMojo.java new file mode 100644 index 0000000000000..348e9299f2e20 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/GenerateCodeTestsMojo.java @@ -0,0 +1,20 @@ +package io.quarkus.maven; + +import java.nio.file.Path; +import java.nio.file.Paths; + +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.LifecyclePhase; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.ResolutionScope; + +@Mojo(name = "generate-code-tests", defaultPhase = LifecyclePhase.GENERATE_TEST_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME) +public class GenerateCodeTestsMojo extends GenerateCodeMojo { + @Override + protected void doExecute() throws MojoExecutionException, MojoFailureException { + String projectDir = mavenProject().getBasedir().getAbsolutePath(); + Path testSources = Paths.get(projectDir, "src", "test"); + generateCode(testSources, path -> mavenProject().addTestCompileSourceRoot(path.toString()), true); + } +} \ No newline at end of file diff --git a/devtools/maven/src/main/java/io/quarkus/maven/GenerateConfigMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/GenerateConfigMojo.java index 376b11989d4c1..cadf0c0b1e755 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/GenerateConfigMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/GenerateConfigMojo.java @@ -12,6 +12,7 @@ import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.logging.Log; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; @@ -21,6 +22,7 @@ import org.apache.maven.project.MavenProjectHelper; import org.eclipse.aether.RepositorySystem; import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.impl.RemoteRepositoryManager; import org.eclipse.aether.repository.RemoteRepository; import io.quarkus.bootstrap.app.CuratedApplication; @@ -30,7 +32,7 @@ import io.quarkus.runner.bootstrap.GenerateConfigTask; /** - * Generates an example application-config.properties, with all properties commented out + * Generates an example application.properties, with all properties commented out. 
* * If this is already present then it will be appended too, although only properties that were not already present * @@ -47,6 +49,9 @@ public class GenerateConfigMojo extends AbstractMojo { @Component private RepositorySystem repoSystem; + @Component + private RemoteRepositoryManager remoteRepoManager; + @Component private MavenProjectHelper projectHelper; @@ -86,10 +91,6 @@ public class GenerateConfigMojo extends AbstractMojo { @Parameter(defaultValue = "${file}") private String file; - public GenerateConfigMojo() { - MojoLogger.logSupplier = this::getLog; - } - @Override public void execute() throws MojoExecutionException { @@ -120,6 +121,7 @@ public void execute() throws MojoExecutionException { .setRepositorySystem(repoSystem) .setRepositorySystemSession(repoSession) .setRemoteRepositories(repos) + .setRemoteRepositoryManager(remoteRepoManager) .build(); final Artifact projectArtifact = project.getArtifact(); @@ -152,4 +154,10 @@ public void execute() throws MojoExecutionException { throw new MojoExecutionException("Failed to generate config file", e); } } + + @Override + public void setLog(Log log) { + super.setLog(log); + MojoLogger.delegate = log; + } } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/ListExtensionsMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/ListExtensionsMojo.java index d129f98302cc8..8ab5df047dc3c 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/ListExtensionsMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/ListExtensionsMojo.java @@ -1,16 +1,12 @@ package io.quarkus.maven; -import java.net.URL; -import java.util.List; - import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import io.quarkus.devtools.commands.ListExtensions; +import io.quarkus.devtools.messagewriter.MessageWriter; import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.tools.MessageWriter; -import io.quarkus.registry.DefaultExtensionRegistry; /** * List the available extensions. 
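The GenerateConfigMojo change above drops the old constructor-time logger supplier in favour of overriding `setLog(Log)` and publishing the Maven `Log` into the static `MojoLogger.delegate` field (the matching MojoLogger rewrite appears further down in this diff). A rough sketch of that volatile-delegate pattern, using a hypothetical `Log` interface rather than the real Maven one:

```java
public class LogDelegateSketch {

    // Hypothetical stand-in for org.apache.maven.plugin.logging.Log.
    interface Log {
        void info(String msg);
    }

    // Static holder mirroring MojoLogger.delegate: the last mojo that had setLog()
    // called on it wins, and a null delegate simply means nothing is logged.
    static final class LogHolder {
        static volatile Log delegate;

        static void log(String msg) {
            Log log = delegate; // read the volatile once so the null check and the call agree
            if (log != null) {
                log.info(msg);
            }
        }
    }

    public static void main(String[] args) {
        LogHolder.log("dropped: no delegate installed yet");
        LogHolder.delegate = System.out::println; // roughly what the new setLog(Log) override does
        LogHolder.log("routed to the installed log");
    }
}
```

Reading the volatile field once into a local keeps the null check and the actual call consistent even if another mojo swaps the delegate concurrently, which is the same idiom the rewritten MojoLogger uses.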
@@ -41,10 +37,10 @@ public class ListExtensionsMojo extends QuarkusProjectMojoBase { protected String searchPattern; /** - * The extension registry URLs + * List the already installed extensions */ - @Parameter(property = "registry", alias = "quarkus.extension.registry") - List registries; + @Parameter(property = "installed", defaultValue = "false") + protected boolean installed; @Override public void doExecute(final QuarkusProject quarkusProject, final MessageWriter log) throws MojoExecutionException { @@ -52,10 +48,8 @@ public void doExecute(final QuarkusProject quarkusProject, final MessageWriter l ListExtensions listExtensions = new ListExtensions(quarkusProject) .all(all) .format(format) - .search(searchPattern); - if (registries != null && !registries.isEmpty()) { - listExtensions.extensionRegistry(DefaultExtensionRegistry.fromURLs(registries)); - } + .search(searchPattern) + .installed(installed); listExtensions.execute(); } catch (Exception e) { throw new MojoExecutionException("Failed to list extensions", e); diff --git a/devtools/maven/src/main/java/io/quarkus/maven/ListPlatformsMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/ListPlatformsMojo.java new file mode 100644 index 0000000000000..8a75f1bcc1a75 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/ListPlatformsMojo.java @@ -0,0 +1,43 @@ +package io.quarkus.maven; + +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +import io.quarkus.devtools.commands.ListPlatforms; +import io.quarkus.devtools.messagewriter.MessageWriter; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.registry.Constants; + +/** + * List imported and optionally other platforms available for the project. 
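The new ListPlatformsMojo defined just below prints each imported platform by trimming `Constants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX` off the descriptor artifactId and re-assembling the BOM coordinates as `groupId:artifactId::pom:version`. A small sketch of that string arithmetic, assuming the suffix is `-quarkus-platform-descriptor` (an assumption; the real value lives in `io.quarkus.registry.Constants`):

```java
public class PlatformBomCoordsSketch {

    // Assumed value of io.quarkus.registry.Constants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX;
    // treat it as an illustration, not the authoritative constant.
    static final String SUFFIX = "-quarkus-platform-descriptor";

    // Recovers the platform BOM coordinates from a platform descriptor artifactId by
    // dropping the descriptor suffix, the same substring arithmetic used in the mojo.
    static String toBomCoords(String groupId, String descriptorArtifactId, String version) {
        String bomArtifactId = descriptorArtifactId.substring(0,
                descriptorArtifactId.length() - SUFFIX.length());
        return groupId + ":" + bomArtifactId + "::pom:" + version;
    }

    public static void main(String[] args) {
        // Prints "io.quarkus:quarkus-bom::pom:999-SNAPSHOT"
        System.out.println(toBomCoords("io.quarkus", "quarkus-bom" + SUFFIX, "999-SNAPSHOT"));
    }
}
```

The same substring arithmetic reappears in ListUpdatesMojo when it maps imported platform descriptors back to their platform BOMs.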
+ */ +@Mojo(name = "list-platforms", requiresProject = false) +public class ListPlatformsMojo extends QuarkusProjectMojoBase { + + /** + * List the already installed extensions + */ + @Parameter(property = "installed", defaultValue = "false") + protected boolean installed; + + @Override + public void doExecute(final QuarkusProject quarkusProject, final MessageWriter log) throws MojoExecutionException { + if (installed) { + getImportedPlatforms().forEach(coords -> { + final StringBuilder buf = new StringBuilder(); + buf.append(coords.getGroupId()).append(":") + .append(coords.getArtifactId().substring(0, + coords.getArtifactId().length() - Constants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX.length())) + .append("::pom:").append(coords.getVersion()); + log.info(buf.toString()); + }); + return; + } + try { + new ListPlatforms(quarkusProject).execute(); + } catch (Exception e) { + throw new MojoExecutionException("Failed to list platforms", e); + } + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/ListUpdatesMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/ListUpdatesMojo.java new file mode 100644 index 0000000000000..eee1e9f479c49 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/ListUpdatesMojo.java @@ -0,0 +1,93 @@ +package io.quarkus.maven; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +import io.quarkus.devtools.messagewriter.MessageWriter; +import io.quarkus.devtools.project.QuarkusProject; +import io.quarkus.registry.Constants; +import io.quarkus.registry.RegistryResolutionException; +import io.quarkus.registry.catalog.Platform; +import io.quarkus.registry.catalog.PlatformCatalog; + +/** + * List updates available for the project + */ +@Mojo(name = "list-updates", requiresProject = false) +public class ListUpdatesMojo extends QuarkusProjectMojoBase { + + /** + * List the already installed extensions + */ + @Parameter(property = "installed", defaultValue = "false") + protected boolean installed; + + @Override + public void doExecute(final QuarkusProject quarkusProject, final MessageWriter log) throws MojoExecutionException { + + final PlatformCatalog catalog; + try { + catalog = getExtensionCatalogResolver().resolvePlatformCatalog(); + } catch (RegistryResolutionException e) { + throw new MojoExecutionException("Failed to resolve the Quarkus platform catalog", e); + } + final List platforms = catalog == null ? 
Collections.emptyList() : catalog.getPlatforms(); + if (platforms.isEmpty()) { + return; + } + List latestList = new ArrayList<>(platforms.size()); + for (Platform p : platforms) { + final ArtifactCoords bom = p.getBom(); + latestList.add(bom); + } + + final List platformJsons = getImportedPlatforms(); + final List importedPlatformBoms = new ArrayList<>(platformJsons.size()); + for (ArtifactCoords descriptor : platformJsons) { + final ArtifactCoords importedBom = new ArtifactCoords(descriptor.getGroupId(), + descriptor.getArtifactId().substring(0, + descriptor.getArtifactId().length() - Constants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX.length()), + null, "pom", descriptor.getVersion()); + if (!latestList.remove(importedBom)) { + importedPlatformBoms.add(importedBom); + } + } + + final Map latest = new HashMap<>(platforms.size()); + for (ArtifactCoords latestBom : latestList) { + latest.put(latestBom.getKey(), latestBom.getVersion()); + } + + log.info("Available Quarkus platform updates:"); + final StringBuilder buf = new StringBuilder(0); + for (ArtifactCoords importedBom : importedPlatformBoms) { + final ArtifactKey key = importedBom.getKey(); + final String update = latest.get(key); + if (update == null || importedBom.getVersion().equals(update)) { + continue; + } + buf.setLength(0); + buf.append(key.getGroupId()).append(':').append(key.getArtifactId()); + for (int i = buf.length(); i < 45; ++i) { + buf.append(' '); + } + buf.append(importedBom.getVersion()); + for (int i = buf.length(); i < 60; ++i) { + buf.append(' '); + } + buf.append(" -> ").append(update); + log.info(buf.toString()); + } + + if (buf.length() == 0) { + log.info("No updates yet, ping @gsmet"); + } + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/MavenDevModeLauncher.java b/devtools/maven/src/main/java/io/quarkus/maven/MavenDevModeLauncher.java new file mode 100644 index 0000000000000..d50eb108099b8 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/MavenDevModeLauncher.java @@ -0,0 +1,52 @@ +package io.quarkus.maven; + +import org.apache.maven.plugin.logging.Log; + +import io.quarkus.deployment.dev.QuarkusDevModeLauncher; + +public class MavenDevModeLauncher extends QuarkusDevModeLauncher { + + /** + * Initializes the launcher builder + * + * @param java path to the java, may be null + * @param log the logger + * @return launcher builder + */ + public static Builder builder(String java, Log log) { + return new MavenDevModeLauncher(log).new Builder(java); + } + + public class Builder extends QuarkusDevModeLauncher.Builder { + + private Builder(String java) { + super(java); + } + } + + private final Log log; + + private MavenDevModeLauncher(Log log) { + this.log = log; + } + + @Override + protected boolean isDebugEnabled() { + return log.isDebugEnabled(); + } + + @Override + protected void debug(Object msg) { + log.error(msg == null ? "null" : msg.toString()); + } + + @Override + protected void error(Object msg) { + log.error(msg == null ? "null" : msg.toString()); + } + + @Override + protected void warn(Object msg) { + log.warn(msg == null ? 
"null" : msg.toString()); + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/MavenProjectBuildFile.java b/devtools/maven/src/main/java/io/quarkus/maven/MavenProjectBuildFile.java new file mode 100644 index 0000000000000..baa4d52148c5d --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/MavenProjectBuildFile.java @@ -0,0 +1,198 @@ +package io.quarkus.maven; + +import static io.quarkus.devtools.project.extensions.Extensions.toKey; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.Properties; +import java.util.function.Supplier; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.maven.model.Dependency; +import org.apache.maven.model.DependencyManagement; +import org.apache.maven.model.Model; + +import io.quarkus.bootstrap.model.AppArtifactCoords; +import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.devtools.project.BuildTool; +import io.quarkus.devtools.project.buildfile.BuildFile; +import io.quarkus.maven.utilities.MojoUtils; +import io.quarkus.registry.catalog.ExtensionCatalog; + +public class MavenProjectBuildFile extends BuildFile { + + private static final Pattern PROPERTY_PATTERN = Pattern.compile("\\$\\{(.+)}"); + + private Supplier modelSupplier; + private Supplier> projectDepsSupplier; + private Supplier> projectManagedDepsSupplier; + private Properties projectProps; + protected List dependencies; + protected List managedDependencies; + protected Model model; + + public MavenProjectBuildFile(Path projectDirPath, ExtensionCatalog extensionsCatalog, Supplier model, + Supplier> projectDeps, + Supplier> projectManagedDeps, + Properties projectProps) { + super(projectDirPath, extensionsCatalog); + this.modelSupplier = model; + this.projectDepsSupplier = projectDeps; + this.projectManagedDepsSupplier = projectManagedDeps; + this.projectProps = projectProps; + } + + @Override + public BuildTool getBuildTool() { + return BuildTool.MAVEN; + } + + @Override + protected boolean addDependency(AppArtifactCoords coords, boolean managed) { + final Dependency d = new Dependency(); + d.setGroupId(coords.getGroupId()); + d.setArtifactId(coords.getArtifactId()); + if (!managed) { + d.setVersion(coords.getVersion()); + } + // When classifier is empty, you get in the pom.xml + if (coords.getClassifier() != null && !coords.getClassifier().isEmpty()) { + d.setClassifier(coords.getClassifier()); + } + d.setType(coords.getType()); + if ("pom".equalsIgnoreCase(coords.getType())) { + d.setScope("import"); + DependencyManagement dependencyManagement = model().getDependencyManagement(); + if (dependencyManagement == null) { + dependencyManagement = new DependencyManagement(); + model().setDependencyManagement(dependencyManagement); + } + if (dependencyManagement.getDependencies() + .stream() + .noneMatch(thisDep -> d.getManagementKey().equals(resolveKey(thisDep)))) { + dependencyManagement.addDependency(d); + // the effective managed dependencies set may already include it + if (!getManagedDependencies().contains(coords)) { + getManagedDependencies().add(coords); + } + return true; + } + } else if (model().getDependencies() + .stream() + .noneMatch(thisDep -> d.getManagementKey().equals(thisDep.getManagementKey()))) { + model().addDependency(d); + // it could still be a transitive dependency or inherited from the parent + if 
(!getDependencies().contains(coords)) { + getDependencies().add(coords); + } + return true; + } + return false; + } + + @Override + protected void removeDependency(AppArtifactKey key) throws IOException { + if (model() != null) { + final Iterator i = getDependencies().iterator(); + while (i.hasNext()) { + final AppArtifactCoords a = i.next(); + if (a.getKey().equals(key)) { + i.remove(); + break; + } + model().getDependencies().removeIf(d -> Objects.equals(toKey(d), key)); + } + } + } + + @Override + protected List getDependencies() { + if (dependencies == null) { + final List projectDeps = projectDepsSupplier.get(); + projectDepsSupplier = null; + dependencies = new ArrayList<>(projectDeps.size()); + for (org.eclipse.aether.graph.Dependency dep : projectDeps) { + org.eclipse.aether.artifact.Artifact a = dep.getArtifact(); + dependencies.add(new AppArtifactCoords(a.getGroupId(), a.getArtifactId(), a.getClassifier(), + a.getExtension(), a.getVersion())); + } + } + return dependencies; + } + + protected List getManagedDependencies() { + if (managedDependencies == null) { + final List managedDeps = projectManagedDepsSupplier.get(); + projectManagedDepsSupplier = null; + managedDependencies = new ArrayList<>(managedDeps.size()); + for (org.eclipse.aether.graph.Dependency dep : managedDeps) { + org.eclipse.aether.artifact.Artifact a = dep.getArtifact(); + managedDependencies.add(new AppArtifactCoords(a.getGroupId(), a.getArtifactId(), a.getClassifier(), + a.getExtension(), a.getVersion())); + } + } + return dependencies; + } + + @Override + protected void writeToDisk() throws IOException { + if (model == null) { + return; + } + try (ByteArrayOutputStream pomOutputStream = new ByteArrayOutputStream()) { + MojoUtils.write(model(), pomOutputStream); + writeToProjectFile(BuildTool.MAVEN.getDependenciesFile(), pomOutputStream.toByteArray()); + } + } + + @Override + protected String getProperty(String propertyName) { + return projectProps.getProperty(propertyName); + } + + @Override + protected void refreshData() { + } + + private Model model() { + if (model == null) { + model = modelSupplier.get().clone(); + modelSupplier = null; + } + return model; + } + + /** + * Resolves dependencies containing property references in the GAV + */ + private String resolveKey(Dependency dependency) { + String resolvedGroupId = toResolvedProperty(dependency.getGroupId()); + String resolvedArtifactId = toResolvedProperty(dependency.getArtifactId()); + String resolvedVersion = toResolvedProperty(dependency.getVersion()); + if (!resolvedGroupId.equals(dependency.getGroupId()) + || !resolvedArtifactId.equals(dependency.getArtifactId()) + || !resolvedVersion.equals(dependency.getVersion())) { + return resolvedGroupId + ":" + resolvedArtifactId + ":" + dependency.getType() + + (dependency.getClassifier() != null ? ":" + dependency.getClassifier() : ""); + } + return dependency.getManagementKey(); + } + + /** + * Resolves properties as ${quarkus.platform.version} + */ + private String toResolvedProperty(String value) { + Matcher matcher = PROPERTY_PATTERN.matcher(value); + if (matcher.matches()) { + String property = getProperty(matcher.group(1)); + return property == null ? 
value : property; + } + return value; + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java b/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java index a903de275476f..dd03ff059714c 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java @@ -3,7 +3,6 @@ import java.text.MessageFormat; import java.util.Collections; import java.util.Map; -import java.util.function.Supplier; import org.apache.maven.plugin.logging.Log; import org.apache.maven.shared.utils.logging.MessageBuilder; @@ -17,7 +16,7 @@ public class MojoLogger implements LoggerProvider { static final Object[] NO_PARAMS = new Object[0]; - public static volatile Supplier logSupplier; + public static volatile Log delegate; @Override public Logger getLogger(final String name) { @@ -25,9 +24,8 @@ public Logger getLogger(final String name) { @Override protected void doLog(final Level level, final String loggerClassName, final Object message, final Object[] parameters, final Throwable thrown) { - final Supplier logSupplier = MojoLogger.logSupplier; - if (logSupplier != null) { - Log log = logSupplier.get(); + final Log log = delegate; + if (log != null) { String text; if (parameters == null || parameters.length == 0) { text = String.valueOf(message); @@ -47,9 +45,8 @@ protected void doLog(final Level level, final String loggerClassName, final Obje @Override protected void doLogf(final Level level, final String loggerClassName, final String format, final Object[] parameters, final Throwable thrown) { - final Supplier logSupplier = MojoLogger.logSupplier; - if (logSupplier != null) { - Log log = logSupplier.get(); + final Log log = delegate; + if (log != null) { String text; if (parameters == null) { try { @@ -73,10 +70,9 @@ protected void doLogf(final Level level, final String loggerClassName, final Str @Override public boolean isEnabled(final Level level) { - final Supplier logSupplier = MojoLogger.logSupplier; - if (logSupplier == null) + final Log log = delegate; + if (log == null) return false; - Log log = logSupplier.get(); switch (level) { case FATAL: case ERROR: diff --git a/devtools/maven/src/main/java/io/quarkus/maven/NativeImageMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/NativeImageMojo.java deleted file mode 100644 index c3203c09cab6b..0000000000000 --- a/devtools/maven/src/main/java/io/quarkus/maven/NativeImageMojo.java +++ /dev/null @@ -1,414 +0,0 @@ -package io.quarkus.maven; - -import static java.util.stream.Collectors.joining; - -import java.io.File; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.plugin.AbstractMojo; -import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.MojoFailureException; -import org.apache.maven.plugins.annotations.Component; -import org.apache.maven.plugins.annotations.LifecyclePhase; -import org.apache.maven.plugins.annotations.Mojo; -import org.apache.maven.plugins.annotations.Parameter; -import org.apache.maven.plugins.annotations.ResolutionScope; -import org.apache.maven.project.MavenProject; -import org.eclipse.aether.RepositorySystem; -import org.eclipse.aether.RepositorySystemSession; -import org.eclipse.aether.artifact.DefaultArtifact; -import org.eclipse.aether.repository.RemoteRepository; - -import io.quarkus.bootstrap.app.AugmentAction; -import io.quarkus.bootstrap.app.AugmentResult; -import 
io.quarkus.bootstrap.app.CuratedApplication; -import io.quarkus.bootstrap.app.QuarkusBootstrap; -import io.quarkus.bootstrap.model.AppArtifact; -import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; - -/** - * Legacy mojo for backwards compatibility reasons. This should not be used in new projects - * - * This has been replaced by setting quarkus.package.type=native in the configuration. - * - * @deprecated - */ -@Mojo(name = "native-image", defaultPhase = LifecyclePhase.PACKAGE, requiresDependencyResolution = ResolutionScope.RUNTIME) -@Deprecated -public class NativeImageMojo extends AbstractMojo { - - protected static final String QUARKUS_PACKAGE_TYPE = "quarkus.package.type"; - @Parameter(defaultValue = "${project}", readonly = true, required = true) - protected MavenProject project; - - @Parameter - public File javaHome; - - @Parameter(defaultValue = "${project.build.directory}") - private File buildDir; - - /** - * The directory for compiled classes. - */ - @Parameter(readonly = true, required = true, defaultValue = "${project.build.outputDirectory}") - private File outputDirectory; - - @Parameter - private Boolean reportErrorsAtRuntime; - - @Parameter(defaultValue = "false") - private Boolean debugSymbols; - - @Parameter(defaultValue = "${native-image.debug-build-process}") - private Boolean debugBuildProcess; - - @Parameter(defaultValue = "true") - private boolean publishDebugBuildProcessPort; - - @Parameter(readonly = true, required = true, defaultValue = "${project.build.finalName}") - private String finalName; - - @Parameter(defaultValue = "${native-image.new-server}") - private Boolean cleanupServer; - - @Parameter - private Boolean enableHttpUrlHandler; - - @Parameter - private Boolean enableHttpsUrlHandler; - - @Parameter - private Boolean enableAllSecurityServices; - - @Parameter - private Boolean enableIsolates; - - @Parameter(defaultValue = "${env.GRAALVM_HOME}") - private String graalvmHome; - - @Parameter(defaultValue = "false") - private Boolean enableServer; - - /** - * @deprecated JNI is always enabled starting from GraalVM 19.3.1. - */ - @Deprecated - @Parameter(defaultValue = "true") - private Boolean enableJni; - - @Parameter(defaultValue = "false") - private Boolean autoServiceLoaderRegistration; - - @Parameter(defaultValue = "false") - private Boolean dumpProxies; - - @Parameter(defaultValue = "${native-image.xmx}") - private String nativeImageXmx; - - @Parameter(defaultValue = "${native-image.docker-build}") - private String dockerBuild; - - @Parameter(defaultValue = "${native-image.container-runtime}") - private String containerRuntime; - - @Parameter(defaultValue = "${native-image.container-runtime-options}") - private String containerRuntimeOptions; - - @Parameter(defaultValue = "false") - private Boolean enableVMInspection; - - @Parameter(defaultValue = "true") - private Boolean fullStackTraces; - - @Deprecated - @Parameter(defaultValue = "${native-image.disable-reports}") - private Boolean disableReports; - - @Parameter(defaultValue = "${native-image.enable-reports}") - private Boolean enableReports; - - @Parameter - private List additionalBuildArgs; - - @Parameter - private Boolean addAllCharsets; - - @Parameter - private Boolean enableFallbackImages; - - @Parameter - private Boolean reportExceptionStackTraces; - - /** - * Coordinates of the Maven artifact containing the original Java application to build the native image for. - * If not provided, the current project is assumed to be the original Java application. - *

    - * The coordinates are expected to be expressed in the following format: - *
    - * groupId:artifactId:classifier:type:version - *
    - * With the classifier, type and version being optional. - *
    - * If the type is missing, the artifact is assumed to be of type JAR. - *
    - * If the version is missing, the artifact is going to be looked up among the project dependencies using the provided - * coordinates. - * - *
    - * However, if the expression consists of only three parts, it is assumed to be groupId:artifactId:version. - * - *
    - * If the expression consists of only four parts, it is assumed to be groupId:artifactId:classifier:type. - */ - @Parameter(required = false, property = "quarkus.appArtifact") - private String appArtifact; - - @Component - private RepositorySystem repoSystem; - - @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) - private RepositorySystemSession repoSession; - - @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) - private List repos; - - public NativeImageMojo() { - MojoLogger.logSupplier = this::getLog; - } - - @Override - public void execute() throws MojoExecutionException, MojoFailureException { - - if (project.getPackaging().equals("pom") && appArtifact == null) { - getLog().info("Type of the artifact is POM and appArtifact parameter has not been set, skipping native-image goal"); - return; - } - - // The runner JAR has not been built yet, so we are going to build it - final AppArtifact appCoords; - AppArtifact managingProject = null; - DefaultArtifact appMvnArtifact = null; - if (appArtifact == null) { - appMvnArtifact = new DefaultArtifact(project.getArtifact().getGroupId(), - project.getArtifact().getArtifactId(), - project.getArtifact().getClassifier(), - project.getArtifact().getArtifactHandler().getExtension(), - project.getArtifact().getVersion()); - appCoords = new AppArtifact(appMvnArtifact.getGroupId(), appMvnArtifact.getArtifactId(), - appMvnArtifact.getClassifier(), appMvnArtifact.getExtension(), - appMvnArtifact.getVersion()); - } else { - final String[] coordsArr = appArtifact.split(":"); - if (coordsArr.length < 2 || coordsArr.length > 5) { - throw new MojoExecutionException( - "appArtifact expression " + appArtifact - + " does not follow format groupId:artifactId:classifier:type:version"); - } - final String groupId = coordsArr[0]; - final String artifactId = coordsArr[1]; - String classifier = ""; - String type = "jar"; - String version = null; - if (coordsArr.length == 3) { - version = coordsArr[2]; - } else if (coordsArr.length > 3) { - classifier = coordsArr[2] == null ? "" : coordsArr[2]; - type = coordsArr[3] == null ? 
"jar" : coordsArr[3]; - if (coordsArr.length > 4) { - version = coordsArr[4]; - } - } - if (version == null) { - for (Artifact dep : project.getArtifacts()) { - if (dep.getArtifactId().equals(artifactId) - && dep.getGroupId().equals(groupId) - && dep.getClassifier().equals(classifier) - && dep.getType().equals(type)) { - appMvnArtifact = new DefaultArtifact(dep.getGroupId(), - dep.getArtifactId(), - dep.getClassifier(), - dep.getArtifactHandler().getExtension(), - dep.getVersion()); - break; - } - } - if (appMvnArtifact == null) { - throw new MojoExecutionException( - "Failed to locate " + appArtifact + " among the project dependencies"); - } - appCoords = new AppArtifact(appMvnArtifact.getGroupId(), appMvnArtifact.getArtifactId(), - appMvnArtifact.getClassifier(), appMvnArtifact.getExtension(), - appMvnArtifact.getVersion()); - } else { - appCoords = new AppArtifact(groupId, artifactId, classifier, type, version); - appMvnArtifact = new DefaultArtifact(groupId, artifactId, classifier, type, version); - } - managingProject = new AppArtifact(project.getArtifact().getGroupId(), - project.getArtifact().getArtifactId(), - project.getArtifact().getClassifier(), - project.getArtifact().getArtifactHandler().getExtension(), - project.getArtifact().getVersion()); - } - try { - - final Properties projectProperties = project.getProperties(); - final Properties realProperties = new Properties(); - for (String name : projectProperties.stringPropertyNames()) { - if (name.startsWith("quarkus.")) { - realProperties.setProperty(name, projectProperties.getProperty(name)); - } - } - realProperties.putIfAbsent("quarkus.application.name", project.getArtifactId()); - realProperties.putIfAbsent("quarkus.application.version", project.getVersion()); - - Map config = createCustomConfig(); - Map old = new HashMap<>(); - for (Map.Entry e : config.entrySet()) { - old.put(e.getKey(), System.getProperty(e.getKey())); - System.setProperty(e.getKey(), e.getValue()); - } - - MavenArtifactResolver resolver = MavenArtifactResolver.builder() - .setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos) - .build(); - appCoords.setPath(resolver.resolve(appMvnArtifact).getArtifact().getFile().toPath()); - - try (CuratedApplication curatedApplication = QuarkusBootstrap.builder() - .setBuildSystemProperties(realProperties) - .setAppArtifact(appCoords) - .setBaseName(finalName) - .setManagingProject(managingProject) - .setMavenArtifactResolver(resolver) - .setLocalProjectDiscovery(false) - .setBaseClassLoader(BuildMojo.class.getClassLoader()) - .setTargetDirectory(buildDir.toPath()) - .build().bootstrap()) { - - AugmentAction action = curatedApplication.createAugmentor(); - AugmentResult result = action.createProductionApplication(); - } finally { - - for (Map.Entry e : old.entrySet()) { - if (e.getValue() == null) { - System.clearProperty(e.getKey()); - } else { - System.setProperty(e.getKey(), e.getValue()); - } - } - } - - } catch (Exception e) { - throw new MojoExecutionException("Failed to generate native image", e); - } - - } - - private Map createCustomConfig() { - Map configs = new HashMap<>(); - configs.put(QUARKUS_PACKAGE_TYPE, "native"); - if (addAllCharsets != null) { - configs.put("quarkus.native.add-all-charsets", addAllCharsets.toString()); - } - if (additionalBuildArgs != null && !additionalBuildArgs.isEmpty()) { - configs.put("quarkus.native.additional-build-args", - additionalBuildArgs.stream() - .map(val -> val.replace("\\", "\\\\")) - .map(val -> val.replace(",", 
"\\,")) - .collect(joining(","))); - } - if (autoServiceLoaderRegistration != null) { - configs.put("quarkus.native.auto-service-loader-registration", autoServiceLoaderRegistration.toString()); - } - if (cleanupServer != null) { - configs.put("quarkus.native.cleanup-server", cleanupServer.toString()); - } - if (debugBuildProcess != null) { - configs.put("quarkus.native.debug-build-process", debugBuildProcess.toString()); - } - if (debugSymbols != null) { - configs.put("quarkus.native.debug-symbols", debugSymbols.toString()); - } - if (disableReports != null) { - configs.put("quarkus.native.enable-reports", Boolean.toString(!disableReports)); - } - if (enableReports != null) { - configs.put("quarkus.native.enable-reports", enableReports.toString()); - } - if (containerRuntime != null && !containerRuntime.trim().isEmpty()) { - configs.put("quarkus.native.container-runtime", containerRuntime); - } else if (dockerBuild != null && !dockerBuild.trim().isEmpty()) { - if (!dockerBuild.toLowerCase().equals("false")) { - if (dockerBuild.toLowerCase().equals("true")) { - configs.put("quarkus.native.container-runtime", "docker"); - } else { - configs.put("quarkus.native.container-runtime", dockerBuild); - } - } - } - if (containerRuntimeOptions != null && !containerRuntimeOptions.trim().isEmpty()) { - configs.put("quarkus.native.container-runtime-options", containerRuntimeOptions); - } - if (dumpProxies != null) { - configs.put("quarkus.native.dump-proxies", dumpProxies.toString()); - } - if (enableAllSecurityServices != null) { - configs.put("quarkus.native.enable-all-security-services", enableAllSecurityServices.toString()); - } - if (enableFallbackImages != null) { - configs.put("quarkus.native.enable-fallback-images", enableFallbackImages.toString()); - } - if (enableHttpsUrlHandler != null) { - configs.put("quarkus.native.enable-https-url-handler", enableHttpsUrlHandler.toString()); - } - if (enableHttpUrlHandler != null) { - configs.put("quarkus.native.enable-http-url-handler", enableHttpUrlHandler.toString()); - } - if (enableIsolates != null) { - configs.put("quarkus.native.enable-isolates", enableIsolates.toString()); - } - if (Boolean.FALSE.equals(enableJni)) { - getLog().warn("Your application is setting the deprecated 'enableJni' Maven option to false. 
Please" - + " consider removing this option as it is ignored (JNI is always enabled) and it will be removed" - + " in a future Quarkus version."); - } - - if (enableServer != null) { - configs.put("quarkus.native.enable-server", enableServer.toString()); - } - - if (enableVMInspection != null) { - configs.put("quarkus.native.enable-vm-inspection", enableVMInspection.toString()); - } - if (fullStackTraces != null) { - configs.put("quarkus.native.full-stack-traces", fullStackTraces.toString()); - } - if (graalvmHome != null && !graalvmHome.trim().isEmpty()) { - configs.put("quarkus.native.graalvm-home", graalvmHome); - } - if (javaHome != null && !javaHome.toString().isEmpty()) { - configs.put("quarkus.native.java-home", javaHome.toString()); - } - if (nativeImageXmx != null && !nativeImageXmx.trim().isEmpty()) { - configs.put("quarkus.native.native-image-xmx", nativeImageXmx); - } - if (reportErrorsAtRuntime != null) { - configs.put("quarkus.native.report-errors-at-runtime", reportErrorsAtRuntime.toString()); - } - if (reportExceptionStackTraces != null) { - configs.put("quarkus.native.report-exception-stack-traces", reportExceptionStackTraces.toString()); - } - if (publishDebugBuildProcessPort) { - configs.put("quarkus.native.publish-debug-build-process-port", - Boolean.toString(publishDebugBuildProcessPort)); - } - return configs; - - } - -} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/PrepareMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/PrepareMojo.java new file mode 100644 index 0000000000000..eedcc050d60ea --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/PrepareMojo.java @@ -0,0 +1,17 @@ +package io.quarkus.maven; + +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.LifecyclePhase; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.ResolutionScope; + +@Deprecated +@Mojo(name = "prepare", defaultPhase = LifecyclePhase.GENERATE_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, threadSafe = true) +public class PrepareMojo extends GenerateCodeMojo { + @Override + public void execute() throws MojoExecutionException, MojoFailureException { + getLog().warn("'prepare' goal is deprecated. Please use 'generate-code' instead"); + super.execute(); + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/PrepareTestsMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/PrepareTestsMojo.java new file mode 100644 index 0000000000000..7c2f0d21fc8cf --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/PrepareTestsMojo.java @@ -0,0 +1,17 @@ +package io.quarkus.maven; + +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.LifecyclePhase; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.ResolutionScope; + +@Deprecated +@Mojo(name = "prepare-tests", defaultPhase = LifecyclePhase.GENERATE_TEST_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, threadSafe = true) +public class PrepareTestsMojo extends GenerateCodeTestsMojo { + @Override + public void execute() throws MojoExecutionException, MojoFailureException { + getLog().warn("'prepare-tests' goal is deprecated. 
Please use 'generate-code-tests' instead"); + super.execute(); + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapMojo.java new file mode 100644 index 0000000000000..1af17c8a29623 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapMojo.java @@ -0,0 +1,163 @@ +package io.quarkus.maven; + +import java.io.File; +import java.util.List; + +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugin.logging.Log; +import org.apache.maven.plugins.annotations.Component; +import org.apache.maven.plugins.annotations.Parameter; +import org.apache.maven.project.MavenProject; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.impl.RemoteRepositoryManager; +import org.eclipse.aether.repository.RemoteRepository; + +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; + +public abstract class QuarkusBootstrapMojo extends AbstractMojo { + + @Component + protected QuarkusBootstrapProvider bootstrapProvider; + + /** + * The current repository/network configuration of Maven. + * + * @parameter default-value="${repositorySystemSession}" + * @readonly + */ + @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) + private RepositorySystemSession repoSession; + + /** + * The project's remote repositories to use for the resolution of artifacts and their dependencies. + * + * @parameter default-value="${project.remoteProjectRepositories}" + * @readonly + */ + @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) + private List repos; + + @Parameter(defaultValue = "${project}", readonly = true, required = true) + private MavenProject project; + + @Parameter(defaultValue = "${project.build.directory}") + private File buildDir; + + @Parameter(defaultValue = "${project.build.finalName}") + private String finalName; + + /** + * When building an uber-jar, this array specifies entries that should + * be excluded from the final jar. The entries are relative to the root of + * the file. An example of this configuration could be: + *

    +     * <configuration>
    +     *   <uberJar>true</uberJar>
    +     *   <ignoredEntries>
    +     *     <ignoredEntry>META-INF/BC2048KE.SF</ignoredEntry>
    +     *     <ignoredEntry>META-INF/BC2048KE.DSA</ignoredEntry>
    +     *     <ignoredEntry>META-INF/BC1024KE.SF</ignoredEntry>
    +     *     <ignoredEntry>META-INF/BC1024KE.DSA</ignoredEntry>
    +     *   </ignoredEntries>
    +     * </configuration>
    + */ + @Parameter(property = "ignoredEntries") + private String[] ignoredEntries; + + private AppArtifactKey projectId; + + @Override + public void execute() throws MojoExecutionException, MojoFailureException { + if (!beforeExecute()) { + return; + } + doExecute(); + } + + @Override + public void setLog(Log log) { + super.setLog(log); + MojoLogger.delegate = log; + } + + /** + * This callback allows to evaluate whether this mojo should be executed, skipped or fail. + * + * @return false if the execution of the mojo should be skipped, true if the mojo should be executed + * @throws MojoExecutionException in case of a failure + * @throws MojoFailureException in case of a failure + */ + protected abstract boolean beforeExecute() throws MojoExecutionException, MojoFailureException; + + /** + * Main mojo execution code + * + * @throws MojoExecutionException in case of a failure + * @throws MojoFailureException in case of a failure + */ + protected abstract void doExecute() throws MojoExecutionException, MojoFailureException; + + protected RepositorySystem repositorySystem() { + return bootstrapProvider.repositorySystem(); + } + + protected RemoteRepositoryManager remoteRepositoryManager() { + return bootstrapProvider.remoteRepositoryManager(); + } + + protected RepositorySystemSession repositorySystemSession() { + return repoSession; + } + + protected List remoteRepositories() { + return repos; + } + + protected MavenProject mavenProject() { + return project; + } + + protected File buildDir() { + return buildDir; + } + + protected File baseDir() { + return project.getBasedir(); + } + + protected String finalName() { + return finalName; + } + + protected String[] ignoredEntries() { + return ignoredEntries; + } + + protected AppArtifactKey projectId() { + return projectId == null ? 
projectId = new AppArtifactKey(project.getGroupId(), project.getArtifactId()) : projectId; + } + + protected AppArtifact projectArtifact() throws MojoExecutionException { + return bootstrapProvider.projectArtifact(this); + } + + protected MavenArtifactResolver artifactResolver() throws MojoExecutionException { + return bootstrapProvider.artifactResolver(this); + } + + protected QuarkusBootstrap bootstrapQuarkus() throws MojoExecutionException { + return bootstrapProvider.bootstrapQuarkus(this); + } + + protected CuratedApplication bootstrapApplication() throws MojoExecutionException { + return bootstrapProvider.bootstrapApplication(this); + } +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java new file mode 100644 index 0000000000000..c827cd3f450b3 --- /dev/null +++ b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java @@ -0,0 +1,222 @@ +package io.quarkus.maven; + +import java.io.Closeable; +import java.io.File; +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +import org.apache.maven.artifact.Artifact; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.project.MavenProject; +import org.codehaus.plexus.component.annotations.Component; +import org.codehaus.plexus.component.annotations.Requirement; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.impl.RemoteRepositoryManager; + +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; + +import io.quarkus.bootstrap.BootstrapException; +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.model.AppArtifactKey; +import io.quarkus.bootstrap.model.PathsCollection; +import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; +import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; + +@Component(role = QuarkusBootstrapProvider.class, instantiationStrategy = "singleton") +public class QuarkusBootstrapProvider implements Closeable { + + @Requirement(role = RepositorySystem.class, optional = false) + protected RepositorySystem repoSystem; + + @Requirement(role = RemoteRepositoryManager.class, optional = false) + protected RemoteRepositoryManager remoteRepoManager; + + private final Cache appBootstrapProviders = CacheBuilder.newBuilder() + .concurrencyLevel(4).softValues().initialCapacity(10).build(); + + public RepositorySystem repositorySystem() { + return repoSystem; + } + + public RemoteRepositoryManager remoteRepositoryManager() { + return remoteRepoManager; + } + + private QuarkusAppBootstrapProvider provider(AppArtifactKey projectId) { + try { + return appBootstrapProviders.get(projectId, () -> new QuarkusAppBootstrapProvider()); + } catch (ExecutionException e) { + throw new IllegalStateException("Failed to cache a new instance of " + QuarkusAppBootstrapProvider.class.getName(), + e); + } + } + + public MavenArtifactResolver artifactResolver(QuarkusBootstrapMojo mojo) + throws MojoExecutionException { + return provider(mojo.projectId()).artifactResolver(mojo); + } + + public AppArtifact projectArtifact(QuarkusBootstrapMojo mojo) + throws MojoExecutionException { + return provider(mojo.projectId()).projectArtifact(mojo); + } + + public QuarkusBootstrap bootstrapQuarkus(QuarkusBootstrapMojo mojo) + throws 
MojoExecutionException { + return provider(mojo.projectId()).bootstrapQuarkus(mojo); + } + + public CuratedApplication bootstrapApplication(QuarkusBootstrapMojo mojo) + throws MojoExecutionException { + return provider(mojo.projectId()).curateApplication(mojo); + } + + @Override + public void close() throws IOException { + if (appBootstrapProviders.size() == 0) { + return; + } + for (QuarkusAppBootstrapProvider p : appBootstrapProviders.asMap().values()) { + try { + p.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + private class QuarkusAppBootstrapProvider implements Closeable { + + private AppArtifact projectArtifact; + private MavenArtifactResolver artifactResolver; + private QuarkusBootstrap quarkusBootstrap; + private CuratedApplication curatedApp; + + private MavenArtifactResolver artifactResolver(QuarkusBootstrapMojo mojo) + throws MojoExecutionException { + if (artifactResolver != null) { + return artifactResolver; + } + try { + return artifactResolver = MavenArtifactResolver.builder() + .setWorkspaceDiscovery(false) + .setRepositorySystem(repoSystem) + .setRepositorySystemSession(mojo.repositorySystemSession()) + .setRemoteRepositories(mojo.remoteRepositories()) + .setRemoteRepositoryManager(remoteRepoManager) + .build(); + } catch (BootstrapMavenException e) { + throw new MojoExecutionException("Failed to initialize Quarkus bootstrap Maven artifact resolver", e); + } + } + + private AppArtifact projectArtifact(QuarkusBootstrapMojo mojo) throws MojoExecutionException { + if (projectArtifact != null) { + return projectArtifact; + } + final Artifact projectArtifact = mojo.mavenProject().getArtifact(); + final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(), + projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(), + projectArtifact.getVersion()); + + File projectFile = projectArtifact.getFile(); + if (projectFile == null) { + projectFile = new File(mojo.mavenProject().getBuild().getOutputDirectory()); + if (!projectFile.exists()) { + /* + * TODO GenerateCodeMojo would fail + * if (hasSources(project)) { + * throw new MojoExecutionException("Project " + project.getArtifact() + " has not been compiled yet"); + * } + */ + if (!projectFile.mkdirs()) { + throw new MojoExecutionException("Failed to create the output dir " + projectFile); + } + } + } + appArtifact.setPaths(PathsCollection.of(projectFile.toPath())); + return appArtifact; + } + + protected QuarkusBootstrap bootstrapQuarkus(QuarkusBootstrapMojo mojo) throws MojoExecutionException { + if (quarkusBootstrap != null) { + return quarkusBootstrap; + } + + final Properties projectProperties = mojo.mavenProject().getProperties(); + final Properties effectiveProperties = new Properties(); + // quarkus. 
properties > ignoredEntries in pom.xml + if (mojo.ignoredEntries() != null && mojo.ignoredEntries().length > 0) { + String joinedEntries = String.join(",", mojo.ignoredEntries()); + effectiveProperties.setProperty("quarkus.package.user-configured-ignored-entries", joinedEntries); + } + for (String name : projectProperties.stringPropertyNames()) { + if (name.startsWith("quarkus.")) { + effectiveProperties.setProperty(name, projectProperties.getProperty(name)); + } + } + + effectiveProperties.putIfAbsent("quarkus.application.name", mojo.mavenProject().getArtifactId()); + effectiveProperties.putIfAbsent("quarkus.application.version", mojo.mavenProject().getVersion()); + + QuarkusBootstrap.Builder builder = QuarkusBootstrap.builder() + .setAppArtifact(projectArtifact(mojo)) + .setMavenArtifactResolver(artifactResolver(mojo)) + .setIsolateDeployment(true) + .setBaseClassLoader(getClass().getClassLoader()) + .setBuildSystemProperties(effectiveProperties) + .setLocalProjectDiscovery(false) + .setProjectRoot(mojo.baseDir().toPath()) + .setBaseName(mojo.finalName()) + .setTargetDirectory(mojo.buildDir().toPath()); + + for (MavenProject project : mojo.mavenProject().getCollectedProjects()) { + builder.addLocalArtifact(new AppArtifactKey(project.getGroupId(), project.getArtifactId(), null, + project.getArtifact().getArtifactHandler().getExtension())); + } + + return quarkusBootstrap = builder.build(); + } + + protected CuratedApplication curateApplication(QuarkusBootstrapMojo mojo) throws MojoExecutionException { + if (curatedApp != null) { + return curatedApp; + } + try { + return curatedApp = bootstrapQuarkus(mojo).bootstrap(); + } catch (MojoExecutionException e) { + throw e; + } catch (BootstrapException e) { + throw new MojoExecutionException("Failed to bootstrap the application", e); + } + } + + @Override + public void close() throws IOException { + if (curatedApp != null) { + curatedApp.close(); + curatedApp = null; + } + this.artifactResolver = null; + this.quarkusBootstrap = null; + } + } + + /* + * private static boolean hasSources(MavenProject project) { + * if (new File(project.getBuild().getSourceDirectory()).exists()) { + * return true; + * } + * for (Resource r : project.getBuild().getResources()) { + * if (new File(r.getDirectory()).exists()) { + * return true; + * } + * } + * return false; + * } + */ +} diff --git a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusProjectMojoBase.java b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusProjectMojoBase.java index 2b3d192471a93..6b8e374e3e640 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusProjectMojoBase.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusProjectMojoBase.java @@ -1,14 +1,15 @@ package io.quarkus.maven; +import java.io.BufferedWriter; import java.io.IOException; +import java.io.StringWriter; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import org.apache.maven.model.Dependency; -import org.apache.maven.model.DependencyManagement; -import org.apache.maven.model.Model; -import org.apache.maven.model.Parent; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Component; @@ -18,21 +19,27 @@ import org.eclipse.aether.RepositorySystemSession; import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; +import org.eclipse.aether.graph.DependencyNode; +import 
org.eclipse.aether.graph.DependencyVisitor; +import org.eclipse.aether.impl.RemoteRepositoryManager; import org.eclipse.aether.repository.RemoteRepository; -import org.eclipse.aether.resolution.ArtifactResult; +import org.eclipse.aether.resolution.ArtifactDescriptorResult; -import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; +import io.quarkus.bootstrap.BootstrapConstants; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; -import io.quarkus.bootstrap.resolver.maven.workspace.ModelUtils; +import io.quarkus.devtools.messagewriter.MessageWriter; import io.quarkus.devtools.project.BuildTool; import io.quarkus.devtools.project.QuarkusProject; -import io.quarkus.platform.descriptor.CombinedQuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.resolver.json.QuarkusJsonPlatformDescriptorResolver; -import io.quarkus.platform.tools.MessageWriter; +import io.quarkus.devtools.project.QuarkusProjectHelper; +import io.quarkus.platform.descriptor.loader.json.ClassPathResourceLoader; import io.quarkus.platform.tools.ToolsConstants; +import io.quarkus.platform.tools.ToolsUtils; import io.quarkus.platform.tools.maven.MojoMessageWriter; +import io.quarkus.registry.ExtensionCatalogResolver; +import io.quarkus.registry.RegistryResolutionException; +import io.quarkus.registry.catalog.ExtensionCatalog; +import io.quarkus.registry.catalog.Platform; public abstract class QuarkusProjectMojoBase extends AbstractMojo { @@ -48,7 +55,7 @@ public abstract class QuarkusProjectMojoBase extends AbstractMojo { @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) protected List repos; - @Parameter(property = "bomGroupId", defaultValue = ToolsConstants.DEFAULT_PLATFORM_BOM_GROUP_ID) + @Parameter(property = "bomGroupId", required = false) private String bomGroupId; @Parameter(property = "bomArtifactId", required = false) @@ -57,95 +64,210 @@ public abstract class QuarkusProjectMojoBase extends AbstractMojo { @Parameter(property = "bomVersion", required = false) private String bomVersion; + @Component + RemoteRepositoryManager remoteRepositoryManager; + + @Parameter(property = "enableRegistryClient") + private boolean enableRegistryClient; + + private List importedPlatforms; + + private Artifact projectArtifact; + private ArtifactDescriptorResult projectDescr; + private MavenArtifactResolver artifactResolver; + private ExtensionCatalogResolver catalogResolver; + private MessageWriter log; + @Override public void execute() throws MojoExecutionException { // Validate Mojo parameters validateParameters(); - final MessageWriter log = new MojoMessageWriter(getLog()); - final Path projectDirPath = project.getBasedir().toPath(); - final BuildTool buildTool = QuarkusProject.resolveExistingProjectBuildTool(projectDirPath); - final QuarkusPlatformDescriptor platformDescriptor = resolvePlatformDescriptor(log); + final Path projectDirPath = baseDir(); + BuildTool buildTool = QuarkusProject.resolveExistingProjectBuildTool(projectDirPath); + if (buildTool == null) { + // it's not Gradle and the pom.xml not found, so we assume there is not project at all + buildTool = BuildTool.MAVEN; + } + + final ExtensionCatalog catalog = resolveExtensionsCatalog(); + final ClassPathResourceLoader codestartsResourceLoader = QuarkusProjectHelper.getResourceLoader(catalog, + artifactResolver()); + final QuarkusProject quarkusProject; + if 
(BuildTool.MAVEN.equals(buildTool) && project.getFile() != null) { + quarkusProject = QuarkusProject.of(baseDir(), catalog, codestartsResourceLoader, getMessageWriter(), + new MavenProjectBuildFile(baseDir(), catalog, () -> project.getOriginalModel(), + () -> { + try { + return projectDependencies(); + } catch (MojoExecutionException e) { + throw new RuntimeException(e); + } + }, + () -> { + try { + return projectDescriptor().getManagedDependencies(); + } catch (MojoExecutionException e) { + throw new RuntimeException(e); + } + }, + project.getModel().getProperties())); + } else { + quarkusProject = QuarkusProject.of(baseDir(), catalog, codestartsResourceLoader, log, buildTool); + } - doExecute(QuarkusProject.of(project.getBasedir().toPath(), platformDescriptor, buildTool), log); + doExecute(quarkusProject, getMessageWriter()); } - private QuarkusPlatformDescriptor resolvePlatformDescriptor(final MessageWriter log) throws MojoExecutionException { - // Resolve and setup the platform descriptor - try { - final MavenArtifactResolver mvn = MavenArtifactResolver.builder().setRepositorySystem(repoSystem) - .setRepositorySystemSession(repoSession) - .setRemoteRepositories(repos).build(); - if (project.getFile() != null) { - final List descrArtifactList = new ArrayList<>(2); - for (Dependency dep : getManagedDependencies(mvn)) { - if ((dep.getScope() == null || !dep.getScope().equals("import")) - && (dep.getType() == null || !dep.getType().equals("pom"))) { - continue; - } - // We don't know which BOM is the platform one, so we are trying every BOM here - final String bomVersion = resolveValue(dep.getVersion()); - final String bomGroupId = resolveValue(dep.getGroupId()); - final String bomArtifactId = resolveValue(dep.getArtifactId()); - if (bomVersion == null || bomGroupId == null || bomArtifactId == null) { - continue; - } + protected MessageWriter getMessageWriter() { + return log == null ? log = new MojoMessageWriter(getLog()) : log; + } + + private ArtifactDescriptorResult projectDescriptor() throws MojoExecutionException { + if (this.projectDescr == null) { + try { + projectDescr = artifactResolver.resolveDescriptor(projectArtifact()); + } catch (Exception e) { + throw new MojoExecutionException("Failed to read the artifact desriptor for the project", e); + } + } + return projectDescr; + } + + protected Path baseDir() { + return project == null || project.getBasedir() == null ? Paths.get("").normalize().toAbsolutePath() + : project.getBasedir().toPath(); + } - final Artifact jsonArtifact = resolveJsonOrNull(mvn, bomGroupId, bomArtifactId, bomVersion); - if (jsonArtifact != null) { - descrArtifactList.add(jsonArtifact); + protected boolean isLimitExtensionsToImportedPlatforms() { + return false; + } + + private ExtensionCatalog resolveExtensionsCatalog() throws MojoExecutionException { + final ExtensionCatalogResolver catalogResolver = enableRegistryClient ? getExtensionCatalogResolver() + : ExtensionCatalogResolver.empty(); + if (catalogResolver.hasRegistries()) { + try { + return isLimitExtensionsToImportedPlatforms() + ? 
catalogResolver.resolveExtensionCatalog(getImportedPlatforms()) + : catalogResolver.resolveExtensionCatalog(getQuarkusCoreVersion()); + } catch (Exception e) { + throw new MojoExecutionException("Failed to resolve the Quarkus extensions catalog", e); + } + } + return ToolsUtils.mergePlatforms(collectImportedPlatforms(), artifactResolver()); + } + + protected ExtensionCatalogResolver getExtensionCatalogResolver() throws MojoExecutionException { + return catalogResolver == null + ? catalogResolver = QuarkusProjectHelper.getCatalogResolver(artifactResolver(), getMessageWriter()) + : catalogResolver; + } + + protected List getImportedPlatforms() throws MojoExecutionException { + if (importedPlatforms == null) { + if (project.getFile() == null) { + if (bomGroupId == null && bomArtifactId == null && bomVersion == null) { + return Collections.emptyList(); + } + if (bomGroupId == null) { + bomGroupId = ToolsConstants.DEFAULT_PLATFORM_BOM_GROUP_ID; + } + final ExtensionCatalogResolver catalogResolver = getExtensionCatalogResolver(); + ArtifactCoords platformBom = null; + List matches = null; + try { + for (Platform p : catalogResolver.resolvePlatformCatalog().getPlatforms()) { + final ArtifactCoords bom = p.getBom(); + if (bomGroupId != null && !bom.getGroupId().equals(bomGroupId)) { + continue; + } + if (bomArtifactId != null && !bom.getArtifactId().equals(bomArtifactId)) { + continue; + } + if (bomVersion != null && !bom.getVersion().equals(bomVersion)) { + continue; + } + if (platformBom == null) { + platformBom = bom; + } else { + if (matches == null) { + matches = new ArrayList<>(); + matches.add(platformBom); + } + matches.add(bom); + } } + } catch (RegistryResolutionException e) { + throw new MojoExecutionException("Failed to resolve the catalog of Quarkus platforms", e); } - if (!descrArtifactList.isEmpty()) { - if (descrArtifactList.size() == 1) { - return loadPlatformDescriptor(mvn, log, descrArtifactList.get(0)); - } else { - final CombinedQuarkusPlatformDescriptor.Builder builder = CombinedQuarkusPlatformDescriptor.builder(); - for (Artifact descrArtifact : descrArtifactList) { - builder.addPlatform(loadPlatformDescriptor(mvn, log, descrArtifact)); + if (matches != null) { + final StringWriter buf = new StringWriter(); + buf.append("Found multiple platforms matching the provided arguments: "); + try (BufferedWriter writer = new BufferedWriter(buf)) { + for (ArtifactCoords coords : matches) { + writer.newLine(); + writer.append("- ").append(coords.toString()); } - return builder.build(); + } catch (IOException e) { + buf.append(matches.toString()); } + throw new MojoExecutionException(buf.toString()); } + return importedPlatforms = Collections.singletonList(platformBom); } - return CreateUtils.resolvePlatformDescriptor(bomGroupId, bomArtifactId, bomVersion, mvn, getLog()); - } catch (Exception e) { - throw new MojoExecutionException("Failed to initialize maven artifact resolver", e); + importedPlatforms = collectImportedPlatforms(); } + return importedPlatforms; } - private QuarkusPlatformDescriptor loadPlatformDescriptor(final MavenArtifactResolver mvn, final MessageWriter log, - Artifact descrArtifact) { - return QuarkusJsonPlatformDescriptorResolver.newInstance() - .setArtifactResolver(new BootstrapAppModelResolver(mvn)) - .setMessageWriter(log) - .resolveFromJson(descrArtifact.getFile().toPath()); + private MavenArtifactResolver artifactResolver() throws MojoExecutionException { + if (artifactResolver == null) { + try { + artifactResolver = MavenArtifactResolver.builder() + 
.setRepositorySystem(repoSystem) + .setRepositorySystemSession(repoSession) + .setRemoteRepositories(repos) + .setRemoteRepositoryManager(remoteRepositoryManager) + .build(); + } catch (BootstrapMavenException e) { + throw new MojoExecutionException("Failed to initialize Maven artifact resolver", e); + } + } + return artifactResolver; } - private Artifact resolveJsonOrNull(MavenArtifactResolver mvn, String bomGroupId, String bomArtifactId, String bomVersion) { - Artifact jsonArtifact = new DefaultArtifact(bomGroupId, bomArtifactId, null, "json", bomVersion); - try { - jsonArtifact = mvn.resolve(jsonArtifact).getArtifact(); - } catch (Exception e) { - if (getLog().isDebugEnabled()) { - getLog().debug("Failed to resolve JSON descriptor as " + jsonArtifact); - } - jsonArtifact = new DefaultArtifact(bomGroupId, bomArtifactId + "-descriptor-json", null, "json", - bomVersion); - try { - jsonArtifact = mvn.resolve(jsonArtifact).getArtifact(); - } catch (Exception e1) { - if (getLog().isDebugEnabled()) { - getLog().debug("Failed to resolve JSON descriptor as " + jsonArtifact); + private List collectImportedPlatforms() + throws MojoExecutionException { + final List descriptors = new ArrayList<>(4); + final List constraints = project.getDependencyManagement() == null ? Collections.emptyList() + : project.getDependencyManagement().getDependencies(); + if (!constraints.isEmpty()) { + final MessageWriter log = getMessageWriter(); + for (Dependency d : constraints) { + if (!("json".equals(d.getType()) + && d.getArtifactId().endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX))) { + continue; } - return null; + final ArtifactCoords a = new ArtifactCoords(d.getGroupId(), d.getArtifactId(), d.getClassifier(), + d.getType(), d.getVersion()); + descriptors.add(a); + log.debug("Found platform descriptor %s", a); } } - if (getLog().isDebugEnabled()) { - getLog().debug("Resolve JSON descriptor " + jsonArtifact); + return descriptors; + } + + private String getQuarkusCoreVersion() { + final List constraints = project.getDependencyManagement() == null ? 
Collections.emptyList() + : project.getDependencyManagement().getDependencies(); + for (Dependency d : constraints) { + if (d.getArtifactId().endsWith("quarkus-core") && d.getGroupId().equals("io.quarkus")) { + return d.getVersion(); + } } - return jsonArtifact; + return null; } protected void validateParameters() throws MojoExecutionException { @@ -154,50 +276,34 @@ protected void validateParameters() throws MojoExecutionException { protected abstract void doExecute(QuarkusProject quarkusProject, MessageWriter log) throws MojoExecutionException; - private String resolveValue(String expr) throws IOException { - if (expr.startsWith("${") && expr.endsWith("}")) { - final String name = expr.substring(2, expr.length() - 1); - final String v = project.getModel().getProperties().getProperty(name); - if (v == null) { - if (getLog().isDebugEnabled()) { - getLog().debug("Failed to resolve property " + name); - } - } - return v; + private List projectDependencies() throws MojoExecutionException { + final List deps = new ArrayList<>(); + try { + artifactResolver().collectDependencies(projectArtifact(), Collections.emptyList()) + .getRoot().accept(new DependencyVisitor() { + @Override + public boolean visitEnter(DependencyNode node) { + if (node.getDependency() != null) { + deps.add(node.getDependency()); + } + return true; + } + + @Override + public boolean visitLeave(DependencyNode node) { + return true; + } + }); + } catch (Exception e) { + throw new MojoExecutionException("Failed to collect dependencies for the project", e); } - return expr; + return deps; } - private List getManagedDependencies(MavenArtifactResolver resolver) throws IOException { - List managedDependencies = new ArrayList<>(); - Model model = project.getOriginalModel(); - DependencyManagement managed = model.getDependencyManagement(); - if (managed != null) { - managedDependencies.addAll(managed.getDependencies()); - } - Parent parent; - while ((parent = model.getParent()) != null) { - try { - ArtifactResult result = resolver.resolve(new DefaultArtifact( - parent.getGroupId(), - parent.getArtifactId(), - "pom", - ModelUtils.resolveVersion(parent.getVersion(), model))); - model = ModelUtils.readModel(result.getArtifact().getFile().toPath()); - managed = model.getDependencyManagement(); - if (managed != null) { - // Alexey Loubyansky: In Maven whatever is imported first has a priority - // So to match the maven way, we should be reading the root parent first - managedDependencies.addAll(0, managed.getDependencies()); - } - } catch (BootstrapMavenException e) { - // ignore - if (getLog().isDebugEnabled()) { - getLog().debug("Error while resolving descriptor", e); - } - break; - } - } - return managedDependencies; + private Artifact projectArtifact() { + return projectArtifact == null + ? 
projectArtifact = new DefaultArtifact(project.getGroupId(), project.getArtifactId(), null, "pom",
+                        project.getVersion())
+                : projectArtifact;
     }
 }
diff --git a/devtools/maven/src/main/java/io/quarkus/maven/RemoteDevMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/RemoteDevMojo.java
index 14b0b1505c986..a2828b3bc2755 100644
--- a/devtools/maven/src/main/java/io/quarkus/maven/RemoteDevMojo.java
+++ b/devtools/maven/src/main/java/io/quarkus/maven/RemoteDevMojo.java
@@ -4,20 +4,13 @@
 import org.apache.maven.plugins.annotations.Mojo;
 import org.apache.maven.plugins.annotations.ResolutionScope;
 
-import io.quarkus.bootstrap.app.QuarkusBootstrap;
-import io.quarkus.deployment.dev.DevModeContext;
-import io.quarkus.deployment.dev.IsolatedRemoteDevModeMain;
-
 /**
  * The dev mojo, that connects to a remote host.
  */
 @Mojo(name = "remote-dev", defaultPhase = LifecyclePhase.PREPARE_PACKAGE, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME)
 public class RemoteDevMojo extends DevMojo {
 
     @Override
-    protected void modifyDevModeContext(DevModeContext devModeContext) {
-        //we use prod mode here as we are going to generate a production
-        //application to sync to the server
-        devModeContext.setMode(QuarkusBootstrap.Mode.PROD);
-        devModeContext.setAlternateEntryPoint(IsolatedRemoteDevModeMain.class.getName());
+    protected void modifyDevModeContext(MavenDevModeLauncher.Builder builder) {
+        builder.remoteDev(true);
     }
 }
diff --git a/devtools/maven/src/main/java/io/quarkus/maven/RemoveExtensionMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/RemoveExtensionMojo.java
index 165c951bc4cd6..6680d4152ec0c 100644
--- a/devtools/maven/src/main/java/io/quarkus/maven/RemoveExtensionMojo.java
+++ b/devtools/maven/src/main/java/io/quarkus/maven/RemoveExtensionMojo.java
@@ -12,8 +12,8 @@
 
 import io.quarkus.devtools.commands.RemoveExtensions;
 import io.quarkus.devtools.commands.data.QuarkusCommandOutcome;
+import io.quarkus.devtools.messagewriter.MessageWriter;
 import io.quarkus.devtools.project.QuarkusProject;
-import io.quarkus.platform.tools.MessageWriter;
 
 /**
  * Allow removing an extension from an existing pom.xml file.
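
The hunks above replace the old per-BOM JSON-descriptor lookup in the mojo base class with an `ExtensionCatalog` that is resolved either through the registry client or by merging the platform descriptors imported in the project's `dependencyManagement`. The following standalone sketch (not part of the patch) illustrates that fallback using only calls that appear in this diff; the package locations of `ExtensionCatalogResolver`, `QuarkusProjectHelper`, `ToolsUtils` and `ArtifactCoords` are assumptions, since their imports are not shown here.

```java
import java.util.List;

import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver;
import io.quarkus.devtools.messagewriter.MessageWriter;
import io.quarkus.devtools.project.QuarkusProjectHelper;
import io.quarkus.maven.ArtifactCoords;
import io.quarkus.platform.tools.ToolsUtils;
import io.quarkus.registry.ExtensionCatalogResolver;
import io.quarkus.registry.catalog.ExtensionCatalog;

class CatalogResolutionSketch {

    // Mirrors the fallback implemented by resolveExtensionsCatalog() in the mojo base class:
    // prefer the registry client when registries are configured, otherwise merge the
    // *-quarkus-platform-descriptor JSON artifacts imported by the project's BOMs.
    static ExtensionCatalog resolveCatalog(boolean enableRegistryClient,
            MavenArtifactResolver artifactResolver,
            MessageWriter log,
            String quarkusCoreVersion,
            List<ArtifactCoords> importedPlatforms) throws Exception {

        // Only build a registry-backed resolver when the registry client is enabled.
        ExtensionCatalogResolver catalogResolver = enableRegistryClient
                ? QuarkusProjectHelper.getCatalogResolver(artifactResolver, log)
                : ExtensionCatalogResolver.empty();

        if (catalogResolver.hasRegistries()) {
            // Registry-backed resolution, keyed on the project's Quarkus core version.
            return catalogResolver.resolveExtensionCatalog(quarkusCoreVersion);
        }

        // Offline-style fallback: merge the imported platform descriptors into one catalog.
        return ToolsUtils.mergePlatforms(importedPlatforms, artifactResolver);
    }
}
```
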
diff --git a/devtools/maven/src/main/java/io/quarkus/maven/components/BootstrapSessionListener.java b/devtools/maven/src/main/java/io/quarkus/maven/components/BootstrapSessionListener.java
new file mode 100644
index 0000000000000..167d069100176
--- /dev/null
+++ b/devtools/maven/src/main/java/io/quarkus/maven/components/BootstrapSessionListener.java
@@ -0,0 +1,27 @@
+package io.quarkus.maven.components;
+
+import java.io.IOException;
+
+import org.apache.maven.AbstractMavenLifecycleParticipant;
+import org.apache.maven.MavenExecutionException;
+import org.apache.maven.execution.MavenSession;
+import org.codehaus.plexus.component.annotations.Component;
+import org.codehaus.plexus.component.annotations.Requirement;
+
+import io.quarkus.maven.QuarkusBootstrapProvider;
+
+@Component(role = AbstractMavenLifecycleParticipant.class, hint = "quarkus-bootstrap")
+public class BootstrapSessionListener extends AbstractMavenLifecycleParticipant {
+
+    @Requirement(optional = false)
+    protected QuarkusBootstrapProvider bootstrapProvider;
+
+    @Override
+    public void afterSessionEnd(MavenSession session) throws MavenExecutionException {
+        try {
+            bootstrapProvider.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/devtools/maven/src/main/java/io/quarkus/maven/components/Prompter.java b/devtools/maven/src/main/java/io/quarkus/maven/components/Prompter.java
index 017f8ecfc5fec..f83db7c27609e 100644
--- a/devtools/maven/src/main/java/io/quarkus/maven/components/Prompter.java
+++ b/devtools/maven/src/main/java/io/quarkus/maven/components/Prompter.java
@@ -3,65 +3,21 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.util.Objects;
 
-import org.apache.commons.lang3.StringUtils;
 import org.codehaus.plexus.component.annotations.Component;
 
-import jline.console.ConsoleReader;
-
 /**
  * Prompt implementation.
* * @author Clement Escoffier */ @Component(role = Prompter.class, instantiationStrategy = "per-lookup") -public class Prompter { - - private final ConsoleReader console; - +public class Prompter extends io.quarkus.devtools.utils.Prompter { public Prompter() throws IOException { - this.console = new ConsoleReader(); - console.setHistoryEnabled(false); - console.setExpandEvents(false); + super(); } public Prompter(InputStream in, OutputStream out) throws IOException { - this.console = new ConsoleReader(in, out); - console.setHistoryEnabled(false); - console.setExpandEvents(false); - } - - public ConsoleReader getConsole() { - return console; - } - - public String prompt(final String message, final Character mask) throws IOException { - Objects.requireNonNull(message); - - final String prompt = String.format("%s: ", message); - String value; - do { - value = console.readLine(prompt, mask); - } while (StringUtils.isBlank(value)); - return value; - } - - public String prompt(final String message) throws IOException { - Objects.requireNonNull(message); - return prompt(message, null); + super(in, out); } - - public String promptWithDefaultValue(final String message, final String defaultValue) throws IOException { - Objects.requireNonNull(message); - Objects.requireNonNull(defaultValue); - - final String prompt = String.format("%s [%s]: ", message, defaultValue); - String value = console.readLine(prompt); - if (StringUtils.isBlank(value)) { - return defaultValue; - } - return value; - } - } diff --git a/devtools/maven/src/main/java/io/quarkus/platform/tools/maven/MojoMessageWriter.java b/devtools/maven/src/main/java/io/quarkus/platform/tools/maven/MojoMessageWriter.java index 175c5d313afd6..2c3de55fd295c 100644 --- a/devtools/maven/src/main/java/io/quarkus/platform/tools/maven/MojoMessageWriter.java +++ b/devtools/maven/src/main/java/io/quarkus/platform/tools/maven/MojoMessageWriter.java @@ -2,7 +2,7 @@ import org.apache.maven.plugin.logging.Log; -import io.quarkus.platform.tools.MessageWriter; +import io.quarkus.devtools.messagewriter.MessageWriter; public class MojoMessageWriter implements MessageWriter { diff --git a/devtools/maven/src/main/resources/META-INF/m2e/lifecycle-mapping-metadata.xml b/devtools/maven/src/main/resources/META-INF/m2e/lifecycle-mapping-metadata.xml new file mode 100644 index 0000000000000..d174ca348de41 --- /dev/null +++ b/devtools/maven/src/main/resources/META-INF/m2e/lifecycle-mapping-metadata.xml @@ -0,0 +1,18 @@ + + + + + + generate-code + generate-code-tests + + + + + false + true + + + + + \ No newline at end of file diff --git a/devtools/maven/src/main/resources/create-extension-templates/DevModeTest.java b/devtools/maven/src/main/resources/create-extension-templates/DevModeTest.java new file mode 100644 index 0000000000000..bea249e22f50b --- /dev/null +++ b/devtools/maven/src/main/resources/create-extension-templates/DevModeTest.java @@ -0,0 +1,22 @@ +package [=javaPackageBase].test; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusDevModeTest; + +public class [=artifactIdBaseCamelCase]DevModeTest { + @RegisterExtension + static final QuarkusDevModeTest devModeTest = new QuarkusDevModeTest() // Start hot reload (DevMode) test with your extension loaded + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)); + + @Test + public void 
test() { + // Write your tests here - see the testing extension guide https://quarkus.io/guides/writing-extensions#testing-hot-reload for more information + Assertions.fail("Add dev mode assertions to " + getClass().getName()); + } + +} diff --git a/devtools/maven/src/main/resources/create-extension-templates/TestResource.java b/devtools/maven/src/main/resources/create-extension-templates/TestResource.java index f8b8d430654c0..5924cb6974287 100644 --- a/devtools/maven/src/main/resources/create-extension-templates/TestResource.java +++ b/devtools/maven/src/main/resources/create-extension-templates/TestResource.java @@ -6,7 +6,7 @@ * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/devtools/maven/src/main/resources/create-extension-templates/UnitTest.java b/devtools/maven/src/main/resources/create-extension-templates/UnitTest.java new file mode 100644 index 0000000000000..67c7dc6d47196 --- /dev/null +++ b/devtools/maven/src/main/resources/create-extension-templates/UnitTest.java @@ -0,0 +1,23 @@ +package [=javaPackageBase].test; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +class [=artifactIdBaseCamelCase]Test { + + @RegisterExtension + static final QuarkusUnitTest unitTest = new QuarkusUnitTest() // Start unit test with your extension loaded + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)); + + @Test + public void test() { + // Write your tests here - see the testing extension guide https://quarkus.io/guides/writing-extensions#testing-extensions for more information + Assertions.fail("Add some assertions to " + getClass().getName()); + } + +} diff --git a/devtools/maven/src/main/resources/create-extension-templates/deployment-pom.xml b/devtools/maven/src/main/resources/create-extension-templates/deployment-pom.xml index b98ab19f868ab..0750743c84ba9 100644 --- a/devtools/maven/src/main/resources/create-extension-templates/deployment-pom.xml +++ b/devtools/maven/src/main/resources/create-extension-templates/deployment-pom.xml @@ -25,6 +25,11 @@ [#if !assumeManaged ] [=r"$"]{project.version} [/#if] + + io.quarkus + quarkus-junit5-internal + test + diff --git a/devtools/maven/src/main/resources/create-extension-templates/integration-test-pom.xml b/devtools/maven/src/main/resources/create-extension-templates/integration-test-pom.xml index d3d7f28fb129b..97b4311c56b2d 100644 --- a/devtools/maven/src/main/resources/create-extension-templates/integration-test-pom.xml +++ b/devtools/maven/src/main/resources/create-extension-templates/integration-test-pom.xml @@ -70,6 +70,9 @@ native + + native + @@ -98,28 +101,6 @@ - - io.quarkus - quarkus-maven-plugin - - - native-image - - native-image - - - false - true - true - false - false - ${graalvmHome} - true - false - - - - diff --git a/devtools/maven/src/main/resources/create-extension-templates/parent-pom.xml b/devtools/maven/src/main/resources/create-extension-templates/parent-pom.xml index 4dd531f8457e8..cfc99377dfd3b 100644 --- 
a/devtools/maven/src/main/resources/create-extension-templates/parent-pom.xml +++ b/devtools/maven/src/main/resources/create-extension-templates/parent-pom.xml @@ -7,11 +7,13 @@ [=grandParentGroupId] [=grandParentArtifactId] [=grandParentVersion] - [=grandParentRelativePath] +[#if grandParentRelativePath?? ] [=grandParentRelativePath] +[/#if] [/#if] -[#if grandParentGroupId?? ][#if groupId != grandParentGroupId ] [=groupId][/#if][#else ] [=groupId] +[#if grandParentGroupId?? ][#if groupId != grandParentGroupId ] [=groupId] +[/#if][#else ] [=groupId] [/#if] [=artifactId]-parent [#if groupId?? && (!grandParentGroupId?? || groupId != grandParentGroupId) && version?? && (!grandParentVersion?? || version != grandParentVersion) ] [=version] diff --git a/devtools/maven/src/main/resources/create-extension-templates/runtime-pom.xml b/devtools/maven/src/main/resources/create-extension-templates/runtime-pom.xml index 91cbc7ee11ba7..18819ba59b719 100644 --- a/devtools/maven/src/main/resources/create-extension-templates/runtime-pom.xml +++ b/devtools/maven/src/main/resources/create-extension-templates/runtime-pom.xml @@ -20,7 +20,7 @@ [=dep.groupId] [=dep.artifactId] -[#if !runtimeBomPathSet ] [=dep.version] +[#if !bomPathSet ] [=dep.version] [/#if] [#if dep.type?? ] [=dep.type] [/#if] diff --git a/devtools/platform-descriptor-json-plugin/pom.xml b/devtools/platform-descriptor-json-plugin/pom.xml index e6482dffd3783..3959a47c3c77c 100644 --- a/devtools/platform-descriptor-json-plugin/pom.xml +++ b/devtools/platform-descriptor-json-plugin/pom.xml @@ -36,11 +36,6 @@ org.wildfly.common wildfly-common - - io.quarkus - quarkus-platform-descriptor-resolver-json - ${project.version} - org.apache.maven.plugin-tools maven-plugin-annotations @@ -65,7 +60,7 @@ org.jboss.slf4j - slf4j-jboss-logging + slf4j-jboss-logmanager diff --git a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GenerateExtensionsJsonMojo.java b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GenerateExtensionsJsonMojo.java index 5188ff543ba11..3764f2d7dd899 100644 --- a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GenerateExtensionsJsonMojo.java +++ b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GenerateExtensionsJsonMojo.java @@ -12,8 +12,10 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import javax.json.Json; import javax.json.JsonArray; @@ -27,6 +29,7 @@ import javax.json.JsonWriterFactory; import javax.json.stream.JsonGenerator; +import org.apache.maven.model.Model; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; @@ -47,27 +50,37 @@ import org.eclipse.aether.resolution.ArtifactResolutionException; import org.eclipse.aether.resolution.ArtifactResult; -import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.json.JsonReadFeature; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; +import com.fasterxml.jackson.databind.PropertyNamingStrategies; import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; 
import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.resolver.maven.workspace.LocalWorkspace; +import io.quarkus.bootstrap.resolver.maven.workspace.ModelUtils; +import io.quarkus.bootstrap.util.IoUtils; import io.quarkus.bootstrap.util.ZipUtils; -import io.quarkus.dependencies.Extension; import io.quarkus.platform.tools.ToolsConstants; /** * This goal generates a list of extensions for a given BOM * and stores it in a JSON format file that is later used by the tools * as the catalog of available extensions. + * + * @deprecated in favor of {@link GeneratePlatformDescriptorJsonMojo} */ +@Deprecated @Mojo(name = "generate-extensions-json") public class GenerateExtensionsJsonMojo extends AbstractMojo { + private static final String GROUP_ID = "group-id"; + private static final String ARTIFACT_ID = "artifact-id"; + private static final String VERSION = "version"; + @Parameter(property = "bomGroupId", defaultValue = "${project.groupId}") private String bomGroupId; @@ -81,7 +94,7 @@ public class GenerateExtensionsJsonMojo extends AbstractMojo { @Parameter(property = "overridesFile", defaultValue = "${project.basedir}/src/main/resources/extensions-overrides.json") private String overridesFile; - @Parameter(property = "outputFile", defaultValue = "${project.build.directory}/extensions.json") + @Parameter(property = "outputFile", defaultValue = "${project.build.directory}/${project.artifactId}-${project.version}-${project.version}.json") private File outputFile; @Component @@ -98,33 +111,65 @@ public class GenerateExtensionsJsonMojo extends AbstractMojo { @Component private MavenProjectHelper projectHelper; - public GenerateExtensionsJsonMojo() { - MojoLogger.logSupplier = this::getLog; - } + /** + * Group ID's that we know don't contain extensions. This can speed up the process + * by preventing the download of artifacts that are not required. 
+ */ + @Parameter + private Set ignoredGroupIds = new HashSet<>(0); + + @Parameter + private boolean skipArtifactIdCheck; + + @Parameter(property = "skipBomCheck") + private boolean skipBomCheck; + + @Parameter(property = "resolveDependencyManagement") + boolean resolveDependencyManagement; @Override public void execute() throws MojoExecutionException, MojoFailureException { + final Artifact jsonArtifact = new DefaultArtifact(project.getGroupId(), project.getArtifactId(), project.getVersion(), + "json", project.getVersion()); + if (!skipArtifactIdCheck) { + final String expectedArtifactId = bomArtifactId + BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX; + if (!jsonArtifact.getArtifactId().equals(expectedArtifactId)) { + throw new MojoExecutionException( + "The project's artifactId " + project.getArtifactId() + " is expected to be " + expectedArtifactId); + } + if (!jsonArtifact.getGroupId().equals(bomGroupId)) { + throw new MojoExecutionException("The project's groupId " + project.getGroupId() + + " is expected to match the groupId of the BOM which is " + bomGroupId); + } + if (!jsonArtifact.getVersion().equals(bomVersion)) { + throw new MojoExecutionException("The project's version " + project.getVersion() + + " is expected to match the version of the BOM which is " + bomVersion); + } + } + // Get the BOM artifact final DefaultArtifact bomArtifact = new DefaultArtifact(bomGroupId, bomArtifactId, "", "pom", bomVersion); info("Generating catalog of extensions for %s", bomArtifact); - // And get all its dependencies (which are extensions) - final List deps; - try { - deps = repoSystem - .readArtifactDescriptor(repoSession, - new ArtifactDescriptorRequest().setRepositories(repos).setArtifact(bomArtifact)) - .getManagedDependencies(); - } catch (ArtifactDescriptorException e) { - throw new MojoExecutionException("Failed to read descriptor of " + bomArtifact, e); + // if the BOM is generated and has replaced the original one, to pick up the generated content + // we should read the dependencyManagement from the generated pom.xml + List deps; + if (resolveDependencyManagement) { + getLog().debug("Resolving dependencyManagement from the artifact descriptor"); + deps = dependencyManagementFromDescriptor(bomArtifact); + } else { + deps = dependencyManagementFromProject(); + if (deps == null) { + deps = dependencyManagementFromResolvedPom(bomArtifact); + } } if (deps.isEmpty()) { getLog().warn("BOM " + bomArtifact + " does not include any dependency"); return; } - List allOverrides = new ArrayList(); + List allOverrides = new ArrayList<>(); for (String path : overridesFile.split(",")) { OverrideInfo overrideInfo = getOverrideInfo(new File(path.trim())); if (overrideInfo != null) { @@ -135,14 +180,26 @@ public void execute() throws MojoExecutionException, MojoFailureException { // Create a JSON array of extension descriptors final JsonArrayBuilder extListJson = Json.createArrayBuilder(); String quarkusCoreVersion = null; + boolean jsonFoundInBom = false; for (Dependency dep : deps) { final Artifact artifact = dep.getArtifact(); - if (!artifact.getExtension().equals("jar") + + if (!skipBomCheck && !jsonFoundInBom) { + jsonFoundInBom = artifact.getArtifactId().equals(jsonArtifact.getArtifactId()) + && artifact.getGroupId().equals(jsonArtifact.getGroupId()) + && artifact.getExtension().equals(jsonArtifact.getExtension()) + && artifact.getClassifier().equals(jsonArtifact.getClassifier()) + && artifact.getVersion().equals(jsonArtifact.getVersion()); + } + + if 
(ignoredGroupIds.contains(artifact.getGroupId()) + || !artifact.getExtension().equals("jar") || "javadoc".equals(artifact.getClassifier()) || "tests".equals(artifact.getClassifier()) || "sources".equals(artifact.getClassifier())) { continue; } + if (artifact.getArtifactId().equals(ToolsConstants.QUARKUS_CORE_ARTIFACT_ID) && artifact.getGroupId().equals(ToolsConstants.QUARKUS_CORE_GROUP_ID)) { quarkusCoreVersion = artifact.getVersion(); @@ -170,6 +227,10 @@ public void execute() throws MojoExecutionException, MojoFailureException { } } + if (!skipBomCheck && !jsonFoundInBom) { + throw new MojoExecutionException( + "Failed to locate " + jsonArtifact + " in the dependencyManagement section of " + bomArtifact); + } if (quarkusCoreVersion == null) { throw new MojoExecutionException("Failed to determine the Quarkus Core version for " + bomArtifact); } @@ -178,9 +239,9 @@ public void execute() throws MojoExecutionException, MojoFailureException { final JsonObjectBuilder platformJson = Json.createObjectBuilder(); // Add information about the BOM to it final JsonObjectBuilder bomJson = Json.createObjectBuilder(); - bomJson.add(Extension.GROUP_ID, bomGroupId); - bomJson.add(Extension.ARTIFACT_ID, bomArtifactId); - bomJson.add(Extension.VERSION, bomVersion); + bomJson.add(GROUP_ID, bomGroupId); + bomJson.add(ARTIFACT_ID, bomArtifactId); + bomJson.add(VERSION, bomVersion); platformJson.add("bom", bomJson.build()); // Add Quarkus version platformJson.add("quarkus-core-version", quarkusCoreVersion); @@ -218,7 +279,86 @@ public void execute() throws MojoExecutionException, MojoFailureException { } info("Extensions file written to %s", outputFile); - projectHelper.attachArtifact(project, "json", null, outputFile); + // this is necessary to sometimes be able to resolve the artifacts from the workspace + final File published = new File(project.getBuild().getDirectory(), LocalWorkspace.getFileName(jsonArtifact)); + if (!outputDir.equals(published)) { + try { + IoUtils.copy(outputFile.toPath(), published.toPath()); + } catch (IOException e) { + throw new MojoExecutionException("Failed to copy " + outputFile + " to " + published); + } + } + projectHelper.attachArtifact(project, jsonArtifact.getExtension(), jsonArtifact.getClassifier(), published); + } + + private List dependencyManagementFromDescriptor(Artifact bomArtifact) throws MojoExecutionException { + try { + return repoSystem + .readArtifactDescriptor(repoSession, + new ArtifactDescriptorRequest().setRepositories(repos).setArtifact(bomArtifact)) + .getManagedDependencies(); + } catch (ArtifactDescriptorException e) { + throw new MojoExecutionException("Failed to read descriptor of " + bomArtifact, e); + } + } + + private List dependencyManagementFromResolvedPom(Artifact bomArtifact) throws MojoExecutionException { + final Path pomXml; + try { + pomXml = repoSystem + .resolveArtifact(repoSession, new ArtifactRequest().setArtifact(bomArtifact).setRepositories(repos)) + .getArtifact().getFile().toPath(); + } catch (ArtifactResolutionException e) { + throw new MojoExecutionException("Failed to resolve " + bomArtifact, e); + } + return readDependencyManagement(pomXml); + } + + private List dependencyManagementFromProject() throws MojoExecutionException { + // if the configured BOM coordinates are not matching the current project + // the current project's POM isn't the right source + if (!project.getArtifact().getArtifactId().equals(bomArtifactId) + || !project.getArtifact().getVersion().equals(bomVersion) + || 
!project.getArtifact().getGroupId().equals(bomGroupId) + || !project.getFile().exists()) { + return null; + } + return readDependencyManagement(project.getFile().toPath()); + } + + private List readDependencyManagement(Path pomXml) throws MojoExecutionException { + if (getLog().isDebugEnabled()) { + getLog().debug("Reading dependencyManagement from " + pomXml); + } + final Model bomModel; + try { + bomModel = ModelUtils.readModel(pomXml); + } catch (IOException e) { + throw new MojoExecutionException("Failed to parse " + project.getFile(), e); + } + + // if the POM has a parent then we better resolve the descriptor + if (bomModel.getParent() != null) { + throw new MojoExecutionException(pomXml + + " has a parent, in which case it is recommended to set 'resolveDependencyManagement' parameter to true"); + } + + if (bomModel.getDependencyManagement() == null) { + return Collections.emptyList(); + } + final List modelDeps = bomModel.getDependencyManagement().getDependencies(); + if (modelDeps.isEmpty()) { + return Collections.emptyList(); + } + + final List deps = new ArrayList<>(modelDeps.size()); + for (org.apache.maven.model.Dependency modelDep : modelDeps) { + final Artifact artifact = new DefaultArtifact(modelDep.getGroupId(), modelDep.getArtifactId(), + modelDep.getClassifier(), modelDep.getType(), modelDep.getVersion()); + // exclusions aren't relevant in this context + deps.add(new Dependency(artifact, modelDep.getScope(), modelDep.isOptional(), Collections.emptyList())); + } + return deps; } private JsonObject processDependency(Artifact artifact) throws IOException { @@ -275,8 +415,8 @@ private JsonNode processMetaInfDir(Artifact artifact, Path metaInfDir) final Path props = metaInfDir.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME); if (Files.exists(props)) { return mapper.createObjectNode() - .put(Extension.ARTIFACT_ID, artifact.getArtifactId()) - .put(Extension.GROUP_ID, artifact.getGroupId()) + .put(ARTIFACT_ID, artifact.getArtifactId()) + .put(GROUP_ID, artifact.getGroupId()) .put("version", artifact.getVersion()) .put("name", artifact.getArtifactId()); } else { @@ -294,7 +434,7 @@ private JsonNode processPlatformArtifact(Artifact artifact, Path descriptor, Obj try (InputStream is = Files.newInputStream(descriptor)) { ObjectNode object = mapper.readValue(is, ObjectNode.class); transformLegacyToNew(object, mapper); - debug("Adding Quarkus extension %s:%s", object.get(Extension.GROUP_ID), object.get(Extension.ARTIFACT_ID)); + debug("Adding Quarkus extension %s:%s", object.get(GROUP_ID), object.get(ARTIFACT_ID)); return object; } catch (IOException io) { throw new IOException("Failed to parse " + descriptor, io); @@ -306,24 +446,27 @@ private ObjectMapper getMapper(boolean yaml) { if (yaml) { YAMLFactory yf = new YAMLFactory(); return new ObjectMapper(yf) - .setPropertyNamingStrategy(PropertyNamingStrategy.KEBAB_CASE); + .setPropertyNamingStrategy(PropertyNamingStrategies.KEBAB_CASE); } else { - return new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT) - .enable(JsonParser.Feature.ALLOW_COMMENTS).enable(JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS) - .setPropertyNamingStrategy(PropertyNamingStrategy.KEBAB_CASE); + return JsonMapper.builder() + .enable(SerializationFeature.INDENT_OUTPUT) + .enable(JsonReadFeature.ALLOW_JAVA_COMMENTS) + .enable(JsonReadFeature.ALLOW_LEADING_ZEROS_FOR_NUMBERS) + .propertyNamingStrategy(PropertyNamingStrategies.KEBAB_CASE) + .build(); } } private String extensionId(JsonObject extObject) { - String artId = 
extObject.getString(Extension.ARTIFACT_ID, ""); + String artId = extObject.getString(ARTIFACT_ID, ""); if (artId.isEmpty()) { getLog().warn("Missing artifactId in extension overrides in " + extObject.toString()); } - String groupId = extObject.getString(Extension.GROUP_ID, ""); + String groupId = extObject.getString(GROUP_ID, ""); if (groupId.isEmpty()) { return artId; } else { - return extObject.getString(Extension.GROUP_ID, "") + ":" + artId; + return extObject.getString(GROUP_ID, "") + ":" + artId; } } @@ -376,12 +519,12 @@ private void transformLegacyToNew(ObjectNode extObject, ObjectMapper mapper) { // just putting it // here for completenes if (extObject.get("groupId") != null) { - extObject.set(Extension.GROUP_ID, extObject.get("groupId")); + extObject.set(GROUP_ID, extObject.get("groupId")); extObject.remove("groupId"); } if (extObject.get("artifactId") != null) { - extObject.set(Extension.ARTIFACT_ID, extObject.get("artifactId")); + extObject.set(ARTIFACT_ID, extObject.get("artifactId")); extObject.remove("artifactId"); } @@ -413,34 +556,34 @@ private void transformLegacyToNew(ObjectNode extObject, ObjectMapper mapper) { public OverrideInfo getOverrideInfo(File overridesFile) throws MojoExecutionException { // Read the overrides file for the extensions (if it exists) - HashMap extOverrides = new HashMap<>(); + HashMap extOverrides = new HashMap<>(); JsonObject theRest = null; if (overridesFile.isFile()) { info("Found overrides file %s", overridesFile); - try (JsonReader jsonReader = Json.createReader(new FileInputStream(overridesFile))) { - JsonObject overridesObject = jsonReader.readObject(); - JsonArray extOverrideObjects = overridesObject.getJsonArray("extensions"); - if (extOverrideObjects != null) { - // Put the extension overrides into a map keyed to their GAV - for (JsonValue val : extOverrideObjects) { - JsonObject extOverrideObject = val.asJsonObject(); - String key = extensionId(extOverrideObject); - extOverrides.put(key, extOverrideObject); + try (FileInputStream fileInputStream = new FileInputStream(overridesFile)) { + try (JsonReader jsonReader = Json.createReader(fileInputStream)) { + JsonObject overridesObject = jsonReader.readObject(); + JsonArray extOverrideObjects = overridesObject.getJsonArray("extensions"); + if (extOverrideObjects != null) { + // Put the extension overrides into a map keyed to their GAV + for (JsonValue val : extOverrideObjects) { + JsonObject extOverrideObject = val.asJsonObject(); + String key = extensionId(extOverrideObject); + extOverrides.put(key, extOverrideObject); + } } - } - theRest = overridesObject; + theRest = overridesObject; + } } catch (IOException e) { throw new MojoExecutionException("Failed to read " + overridesFile, e); } return new OverrideInfo(extOverrides, theRest); - - } else { - throw new MojoExecutionException(overridesFile + " not found."); } + return null; } - private class OverrideInfo { + private static class OverrideInfo { private Map extOverrides; private JsonObject theRest; diff --git a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GeneratePlatformDescriptorJsonMojo.java b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GeneratePlatformDescriptorJsonMojo.java new file mode 100644 index 0000000000000..b90c02ece37fb --- /dev/null +++ b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/GeneratePlatformDescriptorJsonMojo.java @@ -0,0 +1,669 @@ +package io.quarkus.maven; + +import java.io.File; +import java.io.IOException; +import 
java.io.InputStream; +import java.nio.file.FileSystem; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.maven.model.Model; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.Component; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; +import org.apache.maven.project.MavenProject; +import org.apache.maven.project.MavenProjectHelper; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.artifact.Artifact; +import org.eclipse.aether.artifact.DefaultArtifact; +import org.eclipse.aether.graph.Dependency; +import org.eclipse.aether.impl.RemoteRepositoryManager; +import org.eclipse.aether.repository.RemoteRepository; +import org.eclipse.aether.resolution.ArtifactDescriptorException; +import org.eclipse.aether.resolution.ArtifactDescriptorRequest; +import org.eclipse.aether.resolution.ArtifactRequest; +import org.eclipse.aether.resolution.ArtifactResolutionException; +import org.eclipse.aether.resolution.ArtifactResult; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; + +import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.model.AppArtifact; +import io.quarkus.bootstrap.resolver.AppModelResolverException; +import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; +import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; +import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; +import io.quarkus.bootstrap.resolver.maven.workspace.LocalWorkspace; +import io.quarkus.bootstrap.resolver.maven.workspace.ModelUtils; +import io.quarkus.bootstrap.util.IoUtils; +import io.quarkus.bootstrap.util.ZipUtils; +import io.quarkus.platform.descriptor.ProjectPlatformDescriptorJsonUtil; +import io.quarkus.platform.tools.ToolsConstants; +import io.quarkus.registry.catalog.Category; +import io.quarkus.registry.catalog.Extension; +import io.quarkus.registry.catalog.ExtensionOrigin; +import io.quarkus.registry.catalog.json.JsonCatalogMapperHelper; +import io.quarkus.registry.catalog.json.JsonCategory; +import io.quarkus.registry.catalog.json.JsonExtension; +import io.quarkus.registry.catalog.json.JsonExtensionCatalog; + +/** + * This goal generates a platform JSON descriptor for a given platform BOM. 
+ */ +@Mojo(name = "generate-platform-descriptor-json") +public class GeneratePlatformDescriptorJsonMojo extends AbstractMojo { + + @Parameter(property = "quarkusCoreGroupId", defaultValue = ToolsConstants.QUARKUS_CORE_GROUP_ID) + private String quarkusCoreGroupId; + + @Parameter(property = "quarkusCoreArtifactId", defaultValue = ToolsConstants.QUARKUS_CORE_ARTIFACT_ID) + private String quarkusCoreArtifactId; + + @Parameter(property = "bomGroupId", defaultValue = "${project.groupId}") + private String bomGroupId; + + @Parameter(property = "bomArtifactId", defaultValue = "${project.artifactId}") + private String bomArtifactId; + + @Parameter(property = "bomVersion", defaultValue = "${project.version}") + private String bomVersion; + + /** file used for overrides - overridesFiles takes precedence over this file. **/ + @Parameter(property = "overridesFile", defaultValue = "${project.basedir}/src/main/resources/extensions-overrides.json") + private String overridesFile; + + @Parameter(property = "outputFile", defaultValue = "${project.build.directory}/${project.artifactId}-${project.version}-${project.version}.json") + private File outputFile; + + @Component + private RepositorySystem repoSystem; + + @Component + RemoteRepositoryManager remoteRepoManager; + + @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) + private RepositorySystemSession repoSession; + + @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) + private List repos; + + @Component + private MavenProject project; + @Component + private MavenProjectHelper projectHelper; + + /** + * Group ID's that we know don't contain extensions. This can speed up the process + * by preventing the download of artifacts that are not required. 
+ */ + @Parameter + private Set ignoredGroupIds = new HashSet<>(0); + + /** + * Skips the check for the descriptor's artifactId naming convention + */ + @Parameter + private boolean skipArtifactIdCheck; + + /** + * Skips the check for the BOM to contain the generated platform JSON descriptor + */ + @Parameter(property = "skipBomCheck") + private boolean skipBomCheck; + + /** + * Skips the check for categories referenced from the extensions to be listed in the generated descriptor + */ + @Parameter(property = "skipCategoryCheck") + boolean skipCategoryCheck; + + @Parameter(property = "resolveDependencyManagement") + boolean resolveDependencyManagement; + + @Override + public void execute() throws MojoExecutionException, MojoFailureException { + + final Artifact jsonArtifact = new DefaultArtifact(project.getGroupId(), project.getArtifactId(), project.getVersion(), + "json", project.getVersion()); + if (!skipArtifactIdCheck) { + final String expectedArtifactId = bomArtifactId + BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX; + if (!jsonArtifact.getArtifactId().equals(expectedArtifactId)) { + throw new MojoExecutionException( + "The project's artifactId " + project.getArtifactId() + " is expected to be " + expectedArtifactId); + } + if (!jsonArtifact.getGroupId().equals(bomGroupId)) { + throw new MojoExecutionException("The project's groupId " + project.getGroupId() + + " is expected to match the groupId of the BOM which is " + bomGroupId); + } + if (!jsonArtifact.getVersion().equals(bomVersion)) { + throw new MojoExecutionException("The project's version " + project.getVersion() + + " is expected to match the version of the BOM which is " + bomVersion); + } + } + + // Get the BOM artifact + final DefaultArtifact bomArtifact = new DefaultArtifact(bomGroupId, bomArtifactId, "", "pom", bomVersion); + info("Generating catalog of extensions for %s", bomArtifact); + + // if the BOM is generated and has replaced the original one, to pick up the generated content + // we should read the dependencyManagement from the generated pom.xml + List deps; + if (resolveDependencyManagement) { + getLog().debug("Resolving dependencyManagement from the artifact descriptor"); + deps = dependencyManagementFromDescriptor(bomArtifact); + } else { + deps = dependencyManagementFromProject(); + if (deps == null) { + deps = dependencyManagementFromResolvedPom(bomArtifact); + } + } + if (deps.isEmpty()) { + getLog().warn("BOM " + bomArtifact + " does not include any dependency"); + return; + } + + List allOverrides = new ArrayList<>(); + for (String path : overridesFile.split(",")) { + OverrideInfo overrideInfo = getOverrideInfo(new File(path.trim())); + if (overrideInfo != null) { + allOverrides.add(overrideInfo); + } + } + + final JsonExtensionCatalog platformJson = new JsonExtensionCatalog(); + final String platformId = jsonArtifact.getGroupId() + ":" + jsonArtifact.getArtifactId() + ":" + + jsonArtifact.getClassifier() + + ":" + jsonArtifact.getExtension() + ":" + jsonArtifact.getVersion(); + platformJson.setId(platformId); + platformJson.setBom(ArtifactCoords.pom(bomGroupId, bomArtifactId, bomVersion)); + platformJson.setPlatform(true); + + final List importedDescriptors = deps.stream().filter( + d -> d.getArtifact().getArtifactId().endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX) + && d.getArtifact().getExtension().equals("json") + && !(d.getArtifact().getArtifactId().equals(jsonArtifact.getArtifactId()) + && d.getArtifact().getGroupId().equals(jsonArtifact.getGroupId()))) + .map(d -> 
new AppArtifact(d.getArtifact().getGroupId(), d.getArtifact().getArtifactId(), + d.getArtifact().getClassifier(), d.getArtifact().getExtension(), d.getArtifact().getVersion())) + .collect(Collectors.toList()); + + Map inheritedExtensions = Collections.emptyMap(); + if (!importedDescriptors.isEmpty()) { + final MavenArtifactResolver mvnResolver; + try { + mvnResolver = MavenArtifactResolver.builder() + .setRepositorySystem(repoSystem) + .setRemoteRepositoryManager(remoteRepoManager) + .setRepositorySystemSession(repoSession) + .setRemoteRepositories(repos) + .setWorkspaceDiscovery(false) + .build(); + } catch (BootstrapMavenException e) { + throw new MojoExecutionException("Failed to initialize Maven artifact resolver", e); + } + final JsonExtensionCatalog baseCatalog; + try { + baseCatalog = ProjectPlatformDescriptorJsonUtil + .resolveCatalog(new BootstrapAppModelResolver(mvnResolver), importedDescriptors); + } catch (AppModelResolverException e) { + throw new MojoExecutionException("Failed to resolver inherited platform descriptor", e); + } + platformJson.setDerivedFrom(baseCatalog.getDerivedFrom()); + platformJson.setCategories(baseCatalog.getCategories()); + + final List extensions = baseCatalog.getExtensions(); + if (!extensions.isEmpty()) { + inheritedExtensions = new HashMap<>(extensions.size()); + for (Extension e : extensions) { + inheritedExtensions.put(e.getArtifact().getKey(), e); + } + } + + platformJson.setMetadata(baseCatalog.getMetadata()); + } + + // Create a JSON array of extension descriptors + final Set referencedCategories = new HashSet<>(); + final List extListJson = new ArrayList<>(); + platformJson.setExtensions(extListJson); + String quarkusCoreVersion = null; + boolean jsonFoundInBom = false; + for (Dependency dep : deps) { + final Artifact artifact = dep.getArtifact(); + + if (!skipBomCheck && !jsonFoundInBom) { + jsonFoundInBom = artifact.getArtifactId().equals(jsonArtifact.getArtifactId()) + && artifact.getGroupId().equals(jsonArtifact.getGroupId()) + && artifact.getExtension().equals(jsonArtifact.getExtension()) + && artifact.getClassifier().equals(jsonArtifact.getClassifier()) + && artifact.getVersion().equals(jsonArtifact.getVersion()); + } + + if (ignoredGroupIds.contains(artifact.getGroupId()) + || !artifact.getExtension().equals("jar") + || "javadoc".equals(artifact.getClassifier()) + || "tests".equals(artifact.getClassifier()) + || "sources".equals(artifact.getClassifier())) { + continue; + } + + if (artifact.getArtifactId().equals(quarkusCoreArtifactId) + && artifact.getGroupId().equals(quarkusCoreGroupId)) { + quarkusCoreVersion = artifact.getVersion(); + } + ArtifactResult resolved = null; + JsonExtension extension = null; + try { + resolved = repoSystem.resolveArtifact(repoSession, + new ArtifactRequest().setRepositories(repos).setArtifact(artifact)); + extension = processDependency(resolved.getArtifact()); + } catch (ArtifactResolutionException e) { + // there are some parent poms that appear as jars for some reason + debug("Failed to resolve dependency %s defined in %s", artifact, bomArtifact); + } catch (IOException e) { + throw new MojoExecutionException("Failed to process dependency " + artifact, e); + } + + if (extension == null) { + continue; + } + + Extension inherited = inheritedExtensions.get(extension.getArtifact().getKey()); + final List origins; + if (inherited != null) { + origins = new ArrayList<>(inherited.getOrigins().size() + 1); + origins.addAll(inherited.getOrigins()); + origins.add(platformJson); + } else { + origins = 
Arrays.asList(platformJson); + } + extension.setOrigins(origins); + String key = extensionId(extension); + for (OverrideInfo info : allOverrides) { + io.quarkus.registry.catalog.Extension extOverride = info.getExtOverrides().get(key); + if (extOverride != null) { + extension = mergeObject(extension, extOverride); + } + } + extListJson.add(extension); + + if (!skipCategoryCheck) { + try { + @SuppressWarnings("unchecked") + final Collection extCategories = (Collection) extension.getMetadata() + .get("categories"); + if (extCategories != null) { + referencedCategories.addAll(extCategories); + } + } catch (ClassCastException e) { + getLog().warn("Failed to cast the extension categories list to java.util.Collection", e); + } + } + } + + if (!skipBomCheck && !jsonFoundInBom) { + throw new MojoExecutionException( + "Failed to locate " + jsonArtifact + " in the dependencyManagement section of " + bomArtifact); + } + if (quarkusCoreVersion == null) { + throw new MojoExecutionException("Failed to determine the Quarkus Core version for " + bomArtifact); + } + platformJson.setQuarkusCoreVersion(quarkusCoreVersion); + + for (OverrideInfo info : allOverrides) { + if (info.getTheRest() != null) { + if (!info.getTheRest().getCategories().isEmpty()) { + if (platformJson.getCategories().isEmpty()) { + platformJson.setCategories(info.getTheRest().getCategories()); + } else { + info.getTheRest().getCategories().stream().forEach(c -> { + boolean found = false; + for (Category platformC : platformJson.getCategories()) { + if (platformC.getId().equals(c.getId())) { + found = true; + JsonCategory jsonC = (JsonCategory) platformC; + if (c.getDescription() != null) { + jsonC.setDescription(c.getDescription()); + } + if (!c.getMetadata().isEmpty()) { + if (jsonC.getMetadata().isEmpty()) { + jsonC.setMetadata(c.getMetadata()); + } else { + jsonC.getMetadata().putAll(c.getMetadata()); + } + } + if (c.getName() != null) { + jsonC.setName(c.getName()); + } + } + break; + } + if (!found) { + platformJson.getCategories().add(c); + } + }); + } + } + } + if (!info.getTheRest().getMetadata().isEmpty()) { + if (platformJson.getMetadata().isEmpty()) { + platformJson.setMetadata(info.getTheRest().getMetadata()); + } else { + platformJson.getMetadata().putAll(info.getTheRest().getMetadata()); + } + } + } + + // make sure all the categories referenced by extensions are actually present in + // the platform descriptor + if (!skipCategoryCheck) { + final Set catalogCategories = platformJson.getCategories().stream().map(c -> c.getId()) + .collect(Collectors.toSet()); + if (!catalogCategories.containsAll(referencedCategories)) { + final List missing = referencedCategories.stream().filter(c -> !catalogCategories.contains(c)) + .collect(Collectors.toList()); + final StringBuilder buf = new StringBuilder(); + buf.append( + "The following categories referenced from extensions are missing from the generated catalog: "); + buf.append(missing.get(0)); + for (int i = 1; i < missing.size(); ++i) { + buf.append(", ").append(missing.get(i)); + } + throw new MojoExecutionException(buf.toString()); + } + } + + // Write the JSON to the output file + final File outputDir = outputFile.getParentFile(); + if (outputFile.exists()) { + outputFile.delete(); + } else if (!outputDir.exists()) { + if (!outputDir.mkdirs()) { + throw new MojoExecutionException("Failed to create output directory " + outputDir); + } + } + try { + JsonCatalogMapperHelper.serialize(platformJson, outputFile.toPath().getParent().resolve(outputFile.getName())); + } catch 
(IOException e) { + throw new MojoExecutionException("Failed to persist the platform descriptor", e); + } + info("Extensions file written to %s", outputFile); + + // this is necessary to sometimes be able to resolve the artifacts from the workspace + final File published = new File(project.getBuild().getDirectory(), LocalWorkspace.getFileName(jsonArtifact)); + if (!outputDir.equals(published)) { + try { + IoUtils.copy(outputFile.toPath(), published.toPath()); + } catch (IOException e) { + throw new MojoExecutionException("Failed to copy " + outputFile + " to " + published); + } + } + projectHelper.attachArtifact(project, jsonArtifact.getExtension(), jsonArtifact.getClassifier(), published); + } + + private List dependencyManagementFromDescriptor(Artifact bomArtifact) throws MojoExecutionException { + try { + return repoSystem + .readArtifactDescriptor(repoSession, + new ArtifactDescriptorRequest().setRepositories(repos).setArtifact(bomArtifact)) + .getManagedDependencies(); + } catch (ArtifactDescriptorException e) { + throw new MojoExecutionException("Failed to read descriptor of " + bomArtifact, e); + } + } + + private List dependencyManagementFromResolvedPom(Artifact bomArtifact) throws MojoExecutionException { + final Path pomXml; + try { + pomXml = repoSystem + .resolveArtifact(repoSession, new ArtifactRequest().setArtifact(bomArtifact).setRepositories(repos)) + .getArtifact().getFile().toPath(); + } catch (ArtifactResolutionException e) { + throw new MojoExecutionException("Failed to resolve " + bomArtifact, e); + } + return readDependencyManagement(pomXml); + } + + private List dependencyManagementFromProject() throws MojoExecutionException { + // if the configured BOM coordinates are not matching the current project + // the current project's POM isn't the right source + if (!project.getArtifact().getArtifactId().equals(bomArtifactId) + || !project.getArtifact().getVersion().equals(bomVersion) + || !project.getArtifact().getGroupId().equals(bomGroupId) + || !project.getFile().exists()) { + return null; + } + return readDependencyManagement(project.getFile().toPath()); + } + + private List readDependencyManagement(Path pomXml) throws MojoExecutionException { + if (getLog().isDebugEnabled()) { + getLog().debug("Reading dependencyManagement from " + pomXml); + } + final Model bomModel; + try { + bomModel = ModelUtils.readModel(pomXml); + } catch (IOException e) { + throw new MojoExecutionException("Failed to parse " + project.getFile(), e); + } + + // if the POM has a parent then we better resolve the descriptor + if (bomModel.getParent() != null) { + throw new MojoExecutionException(pomXml + + " has a parent, in which case it is recommended to set 'resolveDependencyManagement' parameter to true"); + } + + if (bomModel.getDependencyManagement() == null) { + return Collections.emptyList(); + } + final List modelDeps = bomModel.getDependencyManagement().getDependencies(); + if (modelDeps.isEmpty()) { + return Collections.emptyList(); + } + + final List deps = new ArrayList<>(modelDeps.size()); + for (org.apache.maven.model.Dependency modelDep : modelDeps) { + final Artifact artifact = new DefaultArtifact(modelDep.getGroupId(), modelDep.getArtifactId(), + modelDep.getClassifier(), modelDep.getType(), modelDep.getVersion()); + // exclusions aren't relevant in this context + deps.add(new Dependency(artifact, modelDep.getScope(), modelDep.isOptional(), Collections.emptyList())); + } + return deps; + } + + private JsonExtension processDependency(Artifact artifact) throws IOException { + 
return processDependencyToObjectNode(artifact); + } + + private JsonExtension processDependencyToObjectNode(Artifact artifact) throws IOException { + final Path path = artifact.getFile().toPath(); + if (Files.isDirectory(path)) { + return processMetaInfDir(artifact, path.resolve(BootstrapConstants.META_INF)); + } else { + try (FileSystem artifactFs = ZipUtils.newFileSystem(path)) { + return processMetaInfDir(artifact, artifactFs.getPath(BootstrapConstants.META_INF)); + } + } + } + + /** + * Load and return javax.jsonObject based on yaml, json or properties file. + * + * @param artifact + * @param metaInfDir + * @return + * @throws IOException + */ + private JsonExtension processMetaInfDir(Artifact artifact, Path metaInfDir) + throws IOException { + + ObjectMapper mapper = null; + + if (!Files.exists(metaInfDir)) { + return null; + } + Path jsonOrYaml = null; + + Path yaml = metaInfDir.resolve(BootstrapConstants.QUARKUS_EXTENSION_FILE_NAME); + if (Files.exists(yaml)) { + mapper = getMapper(true); + jsonOrYaml = yaml; + } else { + mapper = getMapper(false); + Path json = metaInfDir.resolve(BootstrapConstants.EXTENSION_PROPS_JSON_FILE_NAME); + if (!Files.exists(json)) { + final Path props = metaInfDir.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME); + if (Files.exists(props)) { + final JsonExtension e = new JsonExtension(); + e.setArtifact(new ArtifactCoords(artifact.getGroupId(), artifact.getArtifactId(), artifact.getClassifier(), + artifact.getExtension(), artifact.getVersion())); + e.setName(artifact.getArtifactId()); + return e; + } + return null; + } else { + jsonOrYaml = json; + } + } + return processPlatformArtifact(artifact, jsonOrYaml, mapper); + } + + private JsonExtension processPlatformArtifact(Artifact artifact, Path descriptor, ObjectMapper mapper) + throws IOException { + try (InputStream is = Files.newInputStream(descriptor)) { + JsonExtension legacy = mapper.readValue(is, JsonExtension.class); + JsonExtension object = transformLegacyToNew(legacy); + debug("Adding Quarkus extension %s", object.getArtifact()); + return object; + } catch (IOException io) { + throw new IOException("Failed to parse " + descriptor, io); + } + } + + private ObjectMapper getMapper(boolean yaml) { + + if (yaml) { + YAMLFactory yf = new YAMLFactory(); + return JsonCatalogMapperHelper.initMapper(new ObjectMapper(yf)); + } else { + return JsonCatalogMapperHelper.mapper(); + } + } + + private String extensionId(io.quarkus.registry.catalog.Extension extObject) { + return extObject.getArtifact().getGroupId() + ":" + extObject.getArtifact().getArtifactId(); + } + + private JsonExtension mergeObject(JsonExtension extObject, io.quarkus.registry.catalog.Extension extOverride) { + if (extOverride.getArtifact() != null) { + extObject.setArtifact(extOverride.getArtifact()); + } + if (!extOverride.getMetadata().isEmpty()) { + if (extObject.getMetadata().isEmpty()) { + extObject.setMetadata(extOverride.getMetadata()); + } else { + extObject.getMetadata().putAll(extOverride.getMetadata()); + } + } + if (extOverride.getName() != null) { + extObject.setName(extOverride.getName()); + } + if (!extOverride.getOrigins().isEmpty()) { + extObject.setOrigins(extOverride.getOrigins()); + } + return extObject; + } + + private void info(String msg, Object... args) { + if (!getLog().isInfoEnabled()) { + return; + } + if (args.length == 0) { + getLog().info(msg); + return; + } + getLog().info(String.format(msg, args)); + } + + private void debug(String msg, Object... 
args) { + if (!getLog().isDebugEnabled()) { + return; + } + if (args.length == 0) { + getLog().debug(msg); + return; + } + getLog().debug(String.format(msg, args)); + } + + private JsonExtension transformLegacyToNew(JsonExtension extObject) { + final Map metadata = extObject.getMetadata(); + final Object labels = metadata.get("labels"); + if (labels != null) { + metadata.put("keywords", labels); + metadata.remove("labels"); + } + return extObject; + } + + public OverrideInfo getOverrideInfo(File overridesFile) throws MojoExecutionException { + // Read the overrides file for the extensions (if it exists) + HashMap extOverrides = new HashMap<>(); + JsonExtensionCatalog theRest = null; + if (overridesFile.isFile()) { + info("Found overrides file %s", overridesFile); + try { + JsonExtensionCatalog overridesObject = JsonCatalogMapperHelper.deserialize(overridesFile.toPath(), + JsonExtensionCatalog.class); + List extensionsOverrides = overridesObject.getExtensions(); + if (!extensionsOverrides.isEmpty()) { + // Put the extension overrides into a map keyed to their GAV + for (io.quarkus.registry.catalog.Extension extOverride : extensionsOverrides) { + String key = extensionId(extOverride); + extOverrides.put(key, extOverride); + } + } + + theRest = overridesObject; + } catch (IOException e) { + throw new MojoExecutionException("Failed to read " + overridesFile, e); + } + return new OverrideInfo(extOverrides, theRest); + } + return null; + } + + private static class OverrideInfo { + private Map extOverrides; + private JsonExtensionCatalog theRest; + + public OverrideInfo(Map extOverrides, + JsonExtensionCatalog theRest) { + this.extOverrides = extOverrides; + this.theRest = theRest; + } + + public Map getExtOverrides() { + return extOverrides; + } + + public JsonExtensionCatalog getTheRest() { + return theRest; + } + } +} diff --git a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/MojoLogger.java b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/MojoLogger.java deleted file mode 100644 index a903de275476f..0000000000000 --- a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/MojoLogger.java +++ /dev/null @@ -1,202 +0,0 @@ -package io.quarkus.maven; - -import java.text.MessageFormat; -import java.util.Collections; -import java.util.Map; -import java.util.function.Supplier; - -import org.apache.maven.plugin.logging.Log; -import org.apache.maven.shared.utils.logging.MessageBuilder; -import org.apache.maven.shared.utils.logging.MessageUtils; -import org.jboss.logging.Logger; -import org.jboss.logging.LoggerProvider; -import org.wildfly.common.Assert; - -/** - */ -public class MojoLogger implements LoggerProvider { - static final Object[] NO_PARAMS = new Object[0]; - - public static volatile Supplier logSupplier; - - @Override - public Logger getLogger(final String name) { - return new Logger(name) { - @Override - protected void doLog(final Level level, final String loggerClassName, final Object message, - final Object[] parameters, final Throwable thrown) { - final Supplier logSupplier = MojoLogger.logSupplier; - if (logSupplier != null) { - Log log = logSupplier.get(); - String text; - if (parameters == null || parameters.length == 0) { - text = String.valueOf(message); - } else { - try { - text = MessageFormat.format(String.valueOf(message), parameters); - } catch (Exception e) { - text = invalidFormat(String.valueOf(message), parameters); - } - } - synchronized (MojoLogger.class) { - doActualLog(log, level, text, thrown); - } - 
} - } - - @Override - protected void doLogf(final Level level, final String loggerClassName, final String format, - final Object[] parameters, final Throwable thrown) { - final Supplier logSupplier = MojoLogger.logSupplier; - if (logSupplier != null) { - Log log = logSupplier.get(); - String text; - if (parameters == null) { - try { - //noinspection RedundantStringFormatCall - text = String.format(format); - } catch (Exception e) { - text = invalidFormat(format, NO_PARAMS); - } - } else { - try { - text = String.format(format, (Object[]) parameters); - } catch (Exception e) { - text = invalidFormat(format, parameters); - } - } - synchronized (MojoLogger.class) { - doActualLog(log, level, text, thrown); - } - } - } - - @Override - public boolean isEnabled(final Level level) { - final Supplier logSupplier = MojoLogger.logSupplier; - if (logSupplier == null) - return false; - Log log = logSupplier.get(); - switch (level) { - case FATAL: - case ERROR: - return log.isErrorEnabled(); - case WARN: - return log.isWarnEnabled(); - case INFO: - return log.isInfoEnabled(); - default: - return log.isDebugEnabled(); - } - } - - void doActualLog(final Log log, final Level level, final String message, final Throwable thrown) { - final MessageBuilder buffer = MessageUtils.buffer(); - // style options are limited unless we crack into jansi ourselves - buffer.strong("[").project(name).strong("]").a(" ").a(message); - if (thrown != null) { - switch (level) { - case FATAL: - case ERROR: - log.error(buffer.toString(), thrown); - break; - case WARN: - log.warn(buffer.toString(), thrown); - break; - case INFO: - log.info(buffer.toString(), thrown); - break; - default: - log.debug(buffer.toString(), thrown); - break; - } - } else { - switch (level) { - case FATAL: - case ERROR: - log.error(buffer.toString()); - break; - case WARN: - log.warn(buffer.toString()); - break; - case INFO: - log.info(buffer.toString()); - break; - default: - log.debug(buffer.toString()); - break; - } - } - } - }; - } - - String invalidFormat(final String format, final Object[] parameters) { - final StringBuilder b = new StringBuilder("** invalid format \'" + format + "\'"); - if (parameters != null && parameters.length > 0) { - b.append(" [").append(parameters[0]); - for (int i = 1; i < parameters.length; i++) { - b.append(',').append(parameters[i]); - } - b.append("]"); - } - return b.toString(); - } - - @Override - public void clearMdc() { - } - - @Override - public Object putMdc(final String key, final Object value) { - //throw Assert.unsupported(); - return null; - } - - @Override - public Object getMdc(final String key) { - return null; - } - - @Override - public void removeMdc(final String key) { - } - - @Override - public Map getMdcMap() { - return Collections.emptyMap(); - } - - @Override - public void clearNdc() { - } - - @Override - public String getNdc() { - return ""; - } - - @Override - public int getNdcDepth() { - return 0; - } - - @Override - public String popNdc() { - return ""; - } - - @Override - public String peekNdc() { - return ""; - } - - @Override - public void pushNdc(final String message) { - throw Assert.unsupported(); - } - - @Override - public void setNdcMaxDepth(final int maxDepth) { - } -} diff --git a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/PlatformPropertiesMojo.java b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/PlatformPropertiesMojo.java new file mode 100644 index 0000000000000..1404e655e3a9b --- /dev/null +++ 
b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/PlatformPropertiesMojo.java @@ -0,0 +1,118 @@ +package io.quarkus.maven; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Properties; + +import org.apache.maven.model.Dependency; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.Component; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; +import org.apache.maven.project.MavenProject; +import org.apache.maven.project.MavenProjectHelper; + +import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.util.IoUtils; + +@Mojo(name = "platform-properties", threadSafe = true) +public class PlatformPropertiesMojo extends AbstractMojo { + + @Component + private MavenProjectHelper projectHelper; + + @Parameter(defaultValue = "${project}", readonly = true, required = true) + private MavenProject project; + + /** + * Skip the execution of this mojo + */ + @Parameter(defaultValue = "true", property = "platformPropertiesInBom") + private boolean platformPropertiesInBom = true; + + @Parameter(defaultValue = "platform-properties.properties", property = "platformPropertiesFileName") + private String propertiesFileName = "platform-properties.properties"; + + @Parameter(property = "skipPlatformPrefixCheck") + boolean skipPlatformPrefixCheck; + + @Override + public void execute() throws MojoExecutionException, MojoFailureException { + + if (!project.getArtifactId().endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) { + throw new MojoExecutionException("Project's artifactId " + project.getArtifactId() + " does not end with " + + BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX); + } + + final File propsFile = new File(project.getBuild().getOutputDirectory(), propertiesFileName); + if (!propsFile.exists()) { + throw new MojoExecutionException("Failed to locate " + propsFile); + } + + if (!skipPlatformPrefixCheck) { + final Properties props = new Properties(); + try (InputStream is = new FileInputStream(propsFile)) { + props.load(is); + } catch (IOException e) { + throw new MojoExecutionException("Failed to read properties " + propsFile, e); + } + StringBuilder buf = null; + for (Object name : props.keySet()) { + if (!String.valueOf(name).startsWith(BootstrapConstants.PLATFORM_PROPERTY_PREFIX)) { + if (buf == null) { + buf = new StringBuilder() + .append("The following platform properties are missing the '") + .append(BootstrapConstants.PLATFORM_PROPERTY_PREFIX) + .append("' prefix: "); + } else { + buf.append(", "); + } + buf.append(name); + } + } + if (buf != null) { + throw new MojoExecutionException(buf.toString()); + } + } + + if (platformPropertiesInBom) { + assertPlatformPropertiesInBom(); + } + + // this is necessary to sometimes be able to resolve the artifacts from the workspace + final File published = new File(project.getBuild().getDirectory(), + project.getArtifactId() + "-" + project.getVersion() + ".properties"); + try { + IoUtils.copy(propsFile.toPath(), published.toPath()); + } catch (IOException e) { + throw new MojoExecutionException("Failed to copy " + propsFile + " to " + published, e); + } + + projectHelper.attachArtifact(project, "properties", published); + } + + private void assertPlatformPropertiesInBom() throws 
MojoExecutionException { + boolean platformPropsManaged = false; + for (Dependency dep : dependencyManagement()) { + if (dep.getArtifactId().equals(project.getArtifactId()) + && dep.getGroupId().equals(project.getGroupId()) + && dep.getVersion().equals(project.getVersion())) { + platformPropsManaged = true; + break; + } + } + if (!platformPropsManaged) { + throw new MojoExecutionException("The project's dependencyManagement does not appear to include " + + project.getGroupId() + ":" + project.getArtifactId() + ":" + project.getVersion()); + } + } + + private List dependencyManagement() { + return project.getDependencyManagement() == null ? null : project.getDependencyManagement().getDependencies(); + } +} diff --git a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/ValidateExtensionsJsonMojo.java b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/ValidateExtensionsJsonMojo.java index c7e0df7f7d33a..88cb63cc48abf 100644 --- a/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/ValidateExtensionsJsonMojo.java +++ b/devtools/platform-descriptor-json-plugin/src/main/java/io/quarkus/maven/ValidateExtensionsJsonMojo.java @@ -9,8 +9,10 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; @@ -23,14 +25,15 @@ import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; import org.eclipse.aether.graph.Dependency; +import org.eclipse.aether.impl.RemoteRepositoryManager; import org.eclipse.aether.repository.RemoteRepository; import io.quarkus.bootstrap.BootstrapConstants; -import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; +import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; -import io.quarkus.dependencies.Extension; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.resolver.json.QuarkusJsonPlatformDescriptorResolver; +import io.quarkus.registry.catalog.Extension; +import io.quarkus.registry.catalog.json.JsonCatalogMapperHelper; +import io.quarkus.registry.catalog.json.JsonExtensionCatalog; /** * This goal validates a given JSON descriptor. @@ -63,12 +66,22 @@ public class ValidateExtensionsJsonMojo extends AbstractMojo { @Component private RepositorySystem repoSystem; + @Component + private RemoteRepositoryManager remoteRepoManager; + @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) private RepositorySystemSession repoSession; @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) private List repos; + /** + * Group ID's that we know don't contain extensions. This can speed up the process + * by preventing the download of artifacts that are not required. 
+ */ + @Parameter + private Set ignoredGroupIds = new HashSet<>(0); + @Override public void execute() throws MojoExecutionException, MojoFailureException { if (skip) { @@ -82,22 +95,35 @@ public void execute() throws MojoExecutionException, MojoFailureException { .setRepositorySystem(repoSystem) .setRepositorySystemSession(repoSession) .setRemoteRepositories(repos) + .setRemoteRepositoryManager(remoteRepoManager) .build(); } catch (Exception e) { throw new MojoExecutionException("Failed to initialize maven artifact resolver", e); } - final QuarkusPlatformDescriptor descriptor = QuarkusJsonPlatformDescriptorResolver.newInstance() - .setArtifactResolver(new BootstrapAppModelResolver(mvn)) - .resolveFromJson(jsonGroupId, jsonArtifactId, jsonVersion); + final Artifact artifact = new DefaultArtifact(jsonGroupId, jsonArtifactId, jsonVersion, "json", jsonVersion); + final Path jsonPath; + try { + jsonPath = mvn.resolve(artifact).getArtifact().getFile().toPath(); + } catch (BootstrapMavenException e) { + throw new MojoExecutionException("Failed to resolve platform descriptor " + artifact, e); + } + + JsonExtensionCatalog catalog; + try { + catalog = JsonCatalogMapperHelper.deserialize(jsonPath, JsonExtensionCatalog.class); + } catch (IOException e) { + throw new MojoExecutionException("Failed to deserialize extension catalog " + jsonPath, e); + } + final ArtifactCoords bomCoords = catalog.getBom(); - final DefaultArtifact bomArtifact = new DefaultArtifact(descriptor.getBomGroupId(), - descriptor.getBomArtifactId(), null, "pom", descriptor.getBomVersion()); + final DefaultArtifact bomArtifact = new DefaultArtifact(bomCoords.getGroupId(), + bomCoords.getArtifactId(), bomCoords.getClassifier(), bomCoords.getType(), bomCoords.getVersion()); final Map bomExtensions = collectBomExtensions(mvn, bomArtifact); List missingFromBom = Collections.emptyList(); - for (Extension ext : descriptor.getExtensions()) { - if (bomExtensions.remove(ext.getGroupId() + ":" + ext.getArtifactId()) == null) { + for (Extension ext : catalog.getExtensions()) { + if (bomExtensions.remove(ext.getArtifact().getGroupId() + ":" + ext.getArtifact().getArtifactId()) == null) { if (missingFromBom.isEmpty()) { missingFromBom = new ArrayList<>(); } @@ -120,8 +146,7 @@ public void execute() throws MojoExecutionException, MojoFailureException { getLog().error("Extensions from " + jsonGroupId + ":" + jsonArtifactId + ":" + jsonVersion + " missing from " + bomArtifact); for (Extension e : missingFromBom) { - getLog().error("- " + e.getGroupId() + ":" + e.getArtifactId() + ":" + e.getClassifier() + ":" + e.getType() - + ":" + e.getVersion()); + getLog().error("- " + e.getArtifact()); } } throw new MojoExecutionException("Extensions referenced in " + bomArtifact + " and included in " + jsonGroupId + ":" @@ -142,7 +167,8 @@ private Map collectBomExtensions(MavenArtifactResolver mvn, fi for (Dependency dep : bomDeps) { final Artifact artifact = dep.getArtifact(); - if (!artifact.getExtension().equals("jar") + if (ignoredGroupIds.contains(artifact.getGroupId()) + || !artifact.getExtension().equals("jar") || "javadoc".equals(artifact.getClassifier()) || "tests".equals(artifact.getClassifier()) || "sources".equals(artifact.getClassifier())) { diff --git a/devtools/platform-descriptor-json/pom.xml b/devtools/platform-descriptor-json/pom.xml index 23d85256d3f0e..269ae990b3ab7 100644 --- a/devtools/platform-descriptor-json/pom.xml +++ b/devtools/platform-descriptor-json/pom.xml @@ -59,9 +59,10 @@ io.quarkus - quarkus-bom-descriptor-json + 
quarkus-bom-quarkus-platform-descriptor ${project.version} json + ${project.version} ${project.build.outputDirectory}/quarkus-bom-descriptor extensions.json @@ -106,23 +107,10 @@ io.quarkus - quarkus-devtools-common - - - io.quarkus - quarkus-bom-descriptor-json + quarkus-bom-quarkus-platform-descriptor ${project.version} json - - - - com.fasterxml.jackson.core - jackson-databind - - - org.junit.jupiter - junit-jupiter - test + ${project.version} diff --git a/devtools/platform-descriptor-json/src/main/filtered/quarkus.properties b/devtools/platform-descriptor-json/src/main/filtered/quarkus.properties index 75b90715a75f6..2c025575f5827 100644 --- a/devtools/platform-descriptor-json/src/main/filtered/quarkus.properties +++ b/devtools/platform-descriptor-json/src/main/filtered/quarkus.properties @@ -8,10 +8,16 @@ kotlin-version=${kotlin.version} scala-version=${scala.version} scala-plugin-version=${scala-plugin.version} -# Plugin metadata -plugin-groupId=${project.groupId} -plugin-artifactId=quarkus-maven-plugin -plugin-version=${project.version} +quarkus-core-version=${project.version} + +# Maven plugin metadata +maven-plugin-groupId=${project.groupId} +maven-plugin-artifactId=quarkus-maven-plugin +maven-plugin-version=${project.version} + +# Gradle plugin metadata +gradle-plugin-id=io.quarkus +gradle-plugin-version=${project.version} supported-maven-versions=${supported-maven-versions} # the proposed version must be in the range of the supported versions diff --git a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformBom.java b/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformBom.java deleted file mode 100644 index 691a672027932..0000000000000 --- a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformBom.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.quarkus.platform.descriptor.loader.json.impl; - -public class QuarkusJsonPlatformBom { - - public String groupId; - public String artifactId; - public String version; -} diff --git a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptor.java b/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptor.java deleted file mode 100644 index d346fd6fd0759..0000000000000 --- a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptor.java +++ /dev/null @@ -1,152 +0,0 @@ -package io.quarkus.platform.descriptor.loader.json.impl; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Serializable; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -import org.apache.maven.model.Dependency; - -import io.quarkus.dependencies.Category; -import io.quarkus.dependencies.Extension; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.ResourceInputStreamConsumer; -import io.quarkus.platform.descriptor.loader.json.ResourceLoader; -import io.quarkus.platform.tools.DefaultMessageWriter; -import io.quarkus.platform.tools.MessageWriter; - -public class QuarkusJsonPlatformDescriptor implements QuarkusPlatformDescriptor, Serializable { - - private String 
bomGroupId; - private String bomArtifactId; - private String bomVersion; - private String quarkusVersion; - - private List extensions = Collections.emptyList(); - private List managedDeps = Collections.emptyList(); - private List categories = Collections.emptyList(); - private transient ResourceLoader resourceLoader; - private transient MessageWriter log; - - public QuarkusJsonPlatformDescriptor() { - } - - public void setBom(QuarkusJsonPlatformBom bom) { - bomGroupId = bom.groupId; - bomArtifactId = bom.artifactId; - bomVersion = bom.version; - } - - public void setQuarkusCoreVersion(String quarkusVersion) { - this.quarkusVersion = quarkusVersion; - } - - public void setExtensions(List extensions) { - this.extensions = extensions; - } - - void setManagedDependencies(List managedDeps) { - this.managedDeps = managedDeps; - } - - void setResourceLoader(ResourceLoader resourceLoader) { - this.resourceLoader = resourceLoader; - } - - void setMessageWriter(MessageWriter log) { - this.log = log; - } - - void setQuarkusVersion(String quarkusVersion) { - this.quarkusVersion = quarkusVersion; - } - - private MessageWriter getLog() { - return log == null ? log = new DefaultMessageWriter() : log; - } - - @Override - public String getBomGroupId() { - return bomGroupId; - } - - @Override - public String getBomArtifactId() { - return bomArtifactId; - } - - @Override - public String getBomVersion() { - return bomVersion; - } - - @Override - public String getQuarkusVersion() { - return quarkusVersion; - } - - @Override - public List getManagedDependencies() { - return managedDeps; - } - - @Override - public List getExtensions() { - return extensions; - } - - @Override - public String getTemplate(String name) { - getLog().debug("Loading Quarkus project template %s", name); - if (resourceLoader == null) { - throw new IllegalStateException("Resource loader has not been provided"); - } - try { - return resourceLoader.loadResource(name, is -> { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { - return reader.lines().collect(Collectors.joining("\n")); - } - }); - } catch (IOException e) { - throw new IllegalStateException("Failed to load " + name, e); - } - } - - @Override - public T loadResource(String name, ResourceInputStreamConsumer consumer) throws IOException { - getLog().debug("Loading Quarkus platform resource %s", name); - if (resourceLoader == null) { - throw new IllegalStateException("Resource loader has not been provided"); - } - return resourceLoader.loadResource(name, consumer); - } - - @Override - public List getCategories() { - return categories; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - QuarkusJsonPlatformDescriptor that = (QuarkusJsonPlatformDescriptor) o; - return bomGroupId.equals(that.bomGroupId) && - bomArtifactId.equals(that.bomArtifactId) && - bomVersion.equals(that.bomVersion); - } - - @Override - public int hashCode() { - return Objects.hash(bomGroupId, bomArtifactId, bomVersion); - } -} diff --git a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptorLoaderBootstrap.java b/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptorLoaderBootstrap.java deleted file mode 100644 index 1cb07ab2a4d51..0000000000000 --- 
a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptorLoaderBootstrap.java +++ /dev/null @@ -1,114 +0,0 @@ -package io.quarkus.platform.descriptor.loader.json.impl; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Properties; -import java.util.function.Function; - -import org.apache.maven.model.Dependency; - -import io.quarkus.maven.utilities.MojoUtils; -import io.quarkus.platform.descriptor.QuarkusPlatformDescriptor; -import io.quarkus.platform.descriptor.loader.QuarkusPlatformDescriptorLoader; -import io.quarkus.platform.descriptor.loader.QuarkusPlatformDescriptorLoaderContext; -import io.quarkus.platform.descriptor.loader.json.ArtifactResolver; -import io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoaderContext; - -/** - * This class is used to bootstrap the Quarkus platform descriptor from the classpath only. - */ -public class QuarkusJsonPlatformDescriptorLoaderBootstrap - implements QuarkusPlatformDescriptorLoader { - - private static InputStream getResourceStream(String relativePath) { - final InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(relativePath); - if (is == null) { - throw new IllegalStateException("Failed to locate " + relativePath + " on the classpath"); - } - return is; - } - - @Override - public QuarkusPlatformDescriptor load(QuarkusPlatformDescriptorLoaderContext context) { - - context.getMessageWriter().debug("Loading the default Quarkus Platform descriptor from the classpath"); - - final Properties props = new Properties(); - final InputStream quarkusProps = getResourceStream("quarkus.properties"); - try { - props.load(quarkusProps); - } catch (IOException e) { - throw new IllegalStateException("Failed to load properties quarkus.properties", e); - } - - final Path resourceRoot; - try { - resourceRoot = MojoUtils.getClassOrigin(QuarkusJsonPlatformDescriptorLoaderBootstrap.class); - } catch (IOException e) { - throw new IllegalStateException("Failed to determine the resource root for " + getClass().getName(), e); - } - - final ArtifactResolver resolver = new ArtifactResolver() { - @Override - public T process(String groupId, String artifactId, String classifier, String type, String version, - Function processor) { - throw new UnsupportedOperationException(); - } - - @Override - public List getManagedDependencies(String groupId, String artifactId, String classifier, String type, - String version) { - if (Files.isDirectory(resourceRoot)) { - return readManagedDeps(resourceRoot.resolve("quarkus-bom/pom.xml")); - } - try (FileSystem fs = FileSystems.newFileSystem(resourceRoot, null)) { - return readManagedDeps(fs.getPath("/quarkus-bom/pom.xml")); - } catch (Exception e) { - throw new IllegalStateException("Failed to open " + resourceRoot, e); - } - } - }; - - return new QuarkusJsonPlatformDescriptorLoaderImpl() - .load(new QuarkusJsonPlatformDescriptorLoaderContext(resolver, context.getMessageWriter()) { - @Override - public T parseJson(Function parser) { - if (Files.isDirectory(resourceRoot)) { - return doParse(resourceRoot.resolve("quarkus-bom-descriptor/extensions.json"), parser); - } - try (FileSystem fs = FileSystems.newFileSystem(resourceRoot, null)) { - return doParse(fs.getPath("/quarkus-bom-descriptor/extensions.json"), parser); - } catch (IOException e) 
{ - throw new IllegalStateException("Failed to open " + resourceRoot, e); - } - } - }); - } - - private static List readManagedDeps(Path pom) { - if (!Files.exists(pom)) { - throw new IllegalStateException("Failed to locate " + pom); - } - try (InputStream is = Files.newInputStream(pom)) { - return MojoUtils.readPom(is).getDependencyManagement().getDependencies(); - } catch (IOException e) { - throw new IllegalStateException("Failed to read model of " + pom, e); - } - } - - private static T doParse(Path p, Function parser) { - if (!Files.exists(p)) { - throw new IllegalStateException("Path does not exist: " + p); - } - try (InputStream is = Files.newInputStream(p)) { - return parser.apply(is); - } catch (IOException e) { - throw new IllegalStateException("Failed to read " + p, e); - } - } -} diff --git a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptorLoaderImpl.java b/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptorLoaderImpl.java deleted file mode 100644 index 50fbac1d4a7e5..0000000000000 --- a/devtools/platform-descriptor-json/src/main/java/io/quarkus/platform/descriptor/loader/json/impl/QuarkusJsonPlatformDescriptorLoaderImpl.java +++ /dev/null @@ -1,40 +0,0 @@ -package io.quarkus.platform.descriptor.loader.json.impl; - -import java.io.IOException; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; - -import io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoader; -import io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoaderContext; - -public class QuarkusJsonPlatformDescriptorLoaderImpl - implements QuarkusJsonPlatformDescriptorLoader { - - @Override - public QuarkusJsonPlatformDescriptor load(final QuarkusJsonPlatformDescriptorLoaderContext context) { - - final QuarkusJsonPlatformDescriptor platform = context - .parseJson(is -> { - try { - ObjectMapper mapper = new ObjectMapper() - .enable(JsonParser.Feature.ALLOW_COMMENTS) - .enable(JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS) - .setPropertyNamingStrategy(PropertyNamingStrategy.KEBAB_CASE); - return mapper.readValue(is, QuarkusJsonPlatformDescriptor.class); - } catch (IOException e) { - throw new RuntimeException("Failed to parse JSON stream", e); - } - }); - - if (context.getArtifactResolver() != null) { - platform.setManagedDependencies(context.getArtifactResolver().getManagedDependencies(platform.getBomGroupId(), - platform.getBomArtifactId(), null, "pom", platform.getBomVersion())); - } - platform.setResourceLoader(context.getResourceLoader()); - platform.setMessageWriter(context.getMessageWriter()); - - return platform; - } -} diff --git a/devtools/platform-descriptor-json/src/main/resources/META-INF/services/io.quarkus.platform.descriptor.loader.QuarkusPlatformDescriptorLoader b/devtools/platform-descriptor-json/src/main/resources/META-INF/services/io.quarkus.platform.descriptor.loader.QuarkusPlatformDescriptorLoader deleted file mode 100644 index 54b44251eaa4f..0000000000000 --- a/devtools/platform-descriptor-json/src/main/resources/META-INF/services/io.quarkus.platform.descriptor.loader.QuarkusPlatformDescriptorLoader +++ /dev/null @@ -1 +0,0 @@ -io.quarkus.platform.descriptor.loader.json.impl.QuarkusJsonPlatformDescriptorLoaderBootstrap \ No newline at end of file diff --git 
a/devtools/platform-descriptor-json/src/main/resources/META-INF/services/io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoader b/devtools/platform-descriptor-json/src/main/resources/META-INF/services/io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoader deleted file mode 100644 index 067557f6396e3..0000000000000 --- a/devtools/platform-descriptor-json/src/main/resources/META-INF/services/io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoader +++ /dev/null @@ -1 +0,0 @@ -io.quarkus.platform.descriptor.loader.json.impl.QuarkusJsonPlatformDescriptorLoaderImpl \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/base/README.tpl.qute.md new file mode 100644 index 0000000000000..10b5fa998714a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/base/README.tpl.qute.md @@ -0,0 +1,11 @@ +## Greet the world! + +```shell script +./jbang src/GreetingCommand.java hello +``` + +## Show help + +```shell script +./jbang src/GreetingCommand.java -h +``` diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/codestart.yml new file mode 100644 index 0000000000000..56accde672505 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/codestart.yml @@ -0,0 +1,19 @@ +name: jbang-picocli-code +ref: picocli +type: code +language: + base: + data: + command: + class-name: GreetingCommand + subcommands: "{HelloCommand.class, GoodByeCommand.class}" + hello: + name: "hello" + description: "Greet World!" + message: "Hello World!" + goodbye: + name: "goodbye" + description: "Say goodbye to World!" + message: "Goodbye World!" + dependencies: + - io.quarkus:quarkus-picocli diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/java/src/{command.class-name}.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/java/src/{command.class-name}.tpl.qute.java new file mode 100644 index 0000000000000..a61da811b5e39 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-picocli-code/java/src/{command.class-name}.tpl.qute.java @@ -0,0 +1,33 @@ +//usr/bin/env jbang "$0" "$@" ; exit $? 
+//DEPS {quarkus.bom.group-id}:{quarkus.bom.artifact-id}:{quarkus.bom.version}@pom +{#for dep in dependencies} +//DEPS {dep} +{/for} + +//JAVAC_OPTIONS -parameters +// +import io.quarkus.picocli.runtime.annotations.TopCommand; +import picocli.CommandLine; + +@TopCommand +@CommandLine.Command(mixinStandardHelpOptions = true, subcommands = {command.subcommands}) +public class {command.class-name} { +} + +@CommandLine.Command(name = "{command.hello.name}", description = "{command.hello.description}") +class HelloCommand implements Runnable { + + @Override + public void run() { + System.out.println("{command.hello.message}"); + } +} + +@CommandLine.Command(name = "{command.goodbye.name}", description = "{command.goodbye.description}") +class GoodByeCommand implements Runnable { + + @Override + public void run() { + System.out.println("{command.goodbye.message}"); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/base/README.tpl.qute.md new file mode 100644 index 0000000000000..bb72b9b43ac54 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/base/README.tpl.qute.md @@ -0,0 +1,3 @@ +```shell script +./jbang src/GreetingResource.java +``` diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/codestart.yml new file mode 100644 index 0000000000000..dd495fc6e7c3d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/codestart.yml @@ -0,0 +1,13 @@ +name: jbang-resteasy-code +ref: resteasy +type: code +language: + base: + data: + resource: + class-name: GreetingResource + path: "/hello-resteasy" + response: "Hello RESTEasy" + dependencies: + - io.quarkus:quarkus-resteasy + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/java/src/{resource.class-name}.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/java/src/{resource.class-name}.tpl.qute.java new file mode 100644 index 0000000000000..cbba81c0b33bf --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/code/jbang-resteasy-code/java/src/{resource.class-name}.tpl.qute.java @@ -0,0 +1,26 @@ +//usr/bin/env jbang "$0" "$@" ; exit $? 
+//DEPS {quarkus.bom.group-id}:{quarkus.bom.artifact-id}:{quarkus.bom.version}@pom +{#for dep in dependencies} +//DEPS {dep} +{/for} + +//JAVAC_OPTIONS -parameters + +import io.quarkus.runtime.Quarkus; +import javax.enterprise.context.ApplicationScoped; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +@Path("{resource.path}") +@ApplicationScoped +public class {resource.class-name} { + + @GET + public String sayHello() { + return "{resource.response}"; + } + + public static void main(String[] args) { + Quarkus.run(args); + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/language/java/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/language/java/codestart.yml new file mode 100644 index 0000000000000..4f1da5b0e1a88 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/language/java/codestart.yml @@ -0,0 +1,3 @@ +name: java +type: language +fallback: true diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/base/..gitignore b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/base/..gitignore new file mode 100644 index 0000000000000..3cf482a1da8dc --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/base/..gitignore @@ -0,0 +1,32 @@ +# Eclipse +.project +.classpath +.settings/ +bin/ + +# IntelliJ +.idea +*.ipr +*.iml +*.iws + +# NetBeans +nb-configuration.xml + +# Visual Studio Code +.vscode +.factorypath + +# OSX +.DS_Store + +# Vim +*.swp +*.swo + +# patch +*.orig +*.rej + +# Local environment +.env diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/base/README.tpl.qute.md new file mode 100644 index 0000000000000..05efca97c0ebd --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/base/README.tpl.qute.md @@ -0,0 +1,2 @@ +# JBang Quarkus Project + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/codestart.yml new file mode 100644 index 0000000000000..9b7e1aee929a2 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/project/quarkus-jbang/codestart.yml @@ -0,0 +1,12 @@ +name: quarkus-jbang +type: project +fallback: true +output-strategy: + "README.md": append + "readme.md": forbidden + "README.adoc": forbidden + "readme.adoc": forbidden + ".gitignore": append + "*": fail-on-duplicate + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/base/jbang b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/base/jbang new file mode 100755 index 0000000000000..adf596c88e062 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/base/jbang @@ -0,0 +1,163 @@ +#!/usr/bin/env bash + +# +# To run this script remotely type this in your shell +# (where ... 
are the arguments you want to pass to Jbang): +# curl -Ls https://sh.jbang.dev | bash -s - ... +# + +# The Java version to install when it's not installed on the system yet +javaVersion=${JBANG_DEFAULT_JAVA_VERSION:-11} + +absolute_path() { + # if the given path to the jbang launcher is absolute (i.e. it is either starting with / or a + # 'letter:/' when using gitbash on windows) it is returned unchanged, otherwise we construct an absolute path + [[ $1 = /* ]] || [[ $1 =~ ^[A-z]:/ ]] && echo "$1" || echo "$PWD/${1#./}" +} + +resolve_symlink() { + if [[ $OSTYPE != darwin* ]]; then minusFarg="-f"; fi + sym_resolved=$(readlink ${minusFarg} $1) + + if [[ -n $sym_resolved ]]; then + echo $sym_resolved + else + echo $1 + fi +} + +download() { + if [ -x "$(command -v curl)" ]; then + curl -sLf -H "Accept: application/gzip, application/octet-stream" -o "$2" $1 + retval=$? + elif [ -x "$(command -v wget)" ]; then + wget -q --header="Accept: application/gzip, application/octet-stream" -O "$2" $1 + retval=$? + else + echo "Error: curl or wget not found, please make sure one of them is installed" 1>&2 + exit 1 + fi +} + +abs_jbang_path=$(resolve_symlink $(absolute_path $0)) + +case "$(uname -s)" in + Linux*) + os=linux;; + Darwin*) + os=mac;; + CYGWIN*|MINGW*|MSYS*) + os=windows;; + *) echo "Unsupported Operating System: $(uname -s)" 1>&2; exit 1;; +esac + +case "$(uname -m)" in + i?86) + arch=x32;; + x86_64|amd64) + arch=x64;; + aarch64) + arch=aarch64;; + armv7l) + arch=arm;; + *) + echo "Unsupported Architecture: $(uname -m)" 1>&2; exit 1;; +esac + +## when mingw/git bash or cygwin fall out to just running the bat file. +if [[ $os == windows ]]; then + $(dirname $abs_jbang_path)/jbang.cmd $* + exit $? +fi + +if [[ -z "$JBANG_DIR" ]]; then JBDIR="$HOME/.jbang"; else JBDIR="$JBANG_DIR"; fi +if [[ -z "$JBANG_CACHE_DIR" ]]; then TDIR="$JBDIR/cache"; else TDIR="$JBANG_CACHE_DIR"; fi + +## resolve application jar path from script location +if [ -f "$(dirname $abs_jbang_path)/jbang.jar" ]; then + jarPath=$(dirname $abs_jbang_path)/jbang.jar +elif [ -f "$(dirname $abs_jbang_path)/.jbang/jbang.jar" ]; then + jarPath=$(dirname $abs_jbang_path)/.jbang/jbang.jar +else + if [ ! -f "$JBDIR/bin/jbang.jar" ]; then + echo "Downloading JBang..." 1>&2 + mkdir -p "$TDIR/urls" + jburl="https://github.com/jbangdev/jbang/releases/latest/download/jbang.tar" + download $jburl "$TDIR/urls/jbang.tar" + if [ $retval -ne 0 ]; then echo "Error downloading JBang" 1>&2; exit $retval; fi + echo "Installing JBang..." 
1>&2 + rm -rf "$TDIR/urls/jbang" + tar xf "$TDIR/urls/jbang.tar" -C "$TDIR/urls" + if [ $retval -ne 0 ]; then echo "Error installing JBang" 1>&2; exit $retval; fi + mkdir -p "$JBDIR/bin" + rm -f "$JBDIR/bin/jbang" "$JBDIR/bin"/jbang.* + cp -f "$TDIR/urls/jbang/bin"/* "$JBDIR/bin" + fi + eval "exec $JBDIR/bin/jbang $*" +fi + +# Find/get a JDK +unset JAVA_EXEC +if [[ -n "$JAVA_HOME" ]]; then + # Determine if a (working) JDK is available in JAVA_HOME + if [ -x "$(command -v $JAVA_HOME/bin/javac)" ]; then + JAVA_EXEC="$JAVA_HOME/bin/java"; + else + echo "JAVA_HOME is set but does not seem to point to a valid Java JDK" 1>&2 + fi +fi +if [[ -z "$JAVA_EXEC" ]]; then + # Determine if a (working) JDK is available on the PATH + if [ -x "$(command -v javac)" ]; then + JAVA_EXEC="java"; + elif [ -x "$JBDIR/currentjdk/bin/javac" ]; then + export JAVA_HOME="$JBDIR/currentjdk" + JAVA_EXEC="$JBDIR/currentjdk/bin/java"; + else + export JAVA_HOME="$TDIR/jdks/$javaVersion" + JAVA_EXEC="$JAVA_HOME/bin/java" + # Check if we installed a JDK before + if [ ! -d "$TDIR/jdks/$javaVersion" ]; then + # If not, download and install it + mkdir -p "$TDIR/jdks" + echo "Downloading JDK $javaVersion. Be patient, this can take several minutes..." 1>&2 + jdkurl="https://api.adoptopenjdk.net/v3/binary/latest/$javaVersion/ga/$os/$arch/jdk/hotspot/normal/adoptopenjdk" + download $jdkurl "$TDIR/bootstrap-jdk.tgz" + if [ $retval -ne 0 ]; then echo "Error downloading JDK" 1>&2; exit $retval; fi + echo "Installing JDK $javaVersion..." 1>&2 + rm -rf "$TDIR/jdks/$javaVersion.tmp/" + mkdir -p "$TDIR/jdks/$javaVersion.tmp" + tar xf "$TDIR/bootstrap-jdk.tgz" -C "$TDIR/jdks/$javaVersion.tmp" --strip-components=1 + retval=$? + if [[ $os == mac && $retval -eq 0 ]]; then + mv "$TDIR/jdks/$javaVersion.tmp/Contents/Home/"* "$TDIR/jdks/$javaVersion.tmp/" + retval=$? + fi + if [ $retval -ne 0 ]; then + # Check if the JDK was installed properly + javac -version > /dev/null 2>&1 + retval=$? + fi + if [ $retval -ne 0 ]; then echo "Error installing JDK" 1>&2; exit $retval; fi + # Activate the downloaded JDK giving it its proper name + mv "$TDIR/jdks/$javaVersion.tmp" "$TDIR/jdks/$javaVersion" + # Set the current JDK + ${JAVA_EXEC} -classpath ${jarPath} dev.jbang.Main jdk default $javaVersion + fi + fi +fi + +## https://stackoverflow.com/questions/1668649/how-to-keep-quotes-in-bash-arguments +## attempt to ensure each argument keeps its original quoting + +## run it using command substitution to have just the user process once jbang is done +output=$(CLICOLOR_FORCE=1 ${JAVA_EXEC} ${JBANG_JAVA_OPTIONS} -classpath ${jarPath} dev.jbang.Main "$@") +err=$? +if [ $err -eq 255 ]; then + eval "exec $output" +elif [ ! 
-z "$output" ]; then + echo "$output" + exit $err +else + exit $err +fi diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/base/jbang.cmd b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/base/jbang.cmd new file mode 100644 index 0000000000000..a1e5a74337a18 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/base/jbang.cmd @@ -0,0 +1,111 @@ +@echo off +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + + +rem The Java version to install when it's not installed on the system yet +if "%JBANG_DEFAULT_JAVA_VERSION%"=="" (set javaVersion=11) else (set javaVersion=%JBANG_DEFAULT_JAVA_VERSION%) + +set os=windows +set arch=x64 + +set jburl="https://github.com/jbangdev/jbang/releases/latest/download/jbang.zip" +set jdkurl="https://api.adoptopenjdk.net/v3/binary/latest/%javaVersion%/ga/%os%/%arch%/jdk/hotspot/normal/adoptopenjdk" + +if "%JBANG_DIR%"=="" (set JBDIR=%userprofile%\.jbang) else (set JBDIR=%JBANG_DIR%) +if "%JBANG_CACHE_DIR%"=="" (set TDIR=%JBDIR%\cache) else (set TDIR=%JBANG_CACHE_DIR%) + +rem resolve application jar path from script location and convert to windows path when using cygwin +if exist "%~dp0jbang.jar" ( + set jarPath=%~dp0jbang.jar +) else if exist "%~dp0.jbang\jbang.jar" ( + set jarPath=%~dp0.jbang\jbang.jar +) else ( + if not exist "%JBDIR%\bin\jbang.jar" ( + echo Downloading JBang... 1>&2 + if not exist "%TDIR%\urls" ( mkdir "%TDIR%\urls" ) + powershell -NoProfile -ExecutionPolicy Bypass -NonInteractive -Command "$ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest %jburl% -OutFile %TDIR%\urls\jbang.zip" + if !ERRORLEVEL! NEQ 0 ( echo Error downloading JBang 1>&2 & exit /b %ERRORLEVEL% ) + echo Installing JBang... 1>&2 + if exist "%TDIR%\urls\jbang" ( rd /s /q "%TDIR%\urls\jbang" > nul 2>&1 ) + powershell -NoProfile -ExecutionPolicy Bypass -NonInteractive -Command "$ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path %TDIR%\urls\jbang.zip -DestinationPath %TDIR%\urls" + if !ERRORLEVEL! NEQ 0 ( echo Error installing JBang 1>&2 & exit /b %ERRORLEVEL% ) + if not exist "%JBDIR%\bin" ( mkdir "%JBDIR%\bin" ) + del /f /q "%JBDIR%\bin\jbang" "%JBDIR%\bin\jbang.*" + copy /y "%TDIR%\urls\jbang\bin\*" "%JBDIR%\bin" > nul 2>&1 + ) + call "%JBDIR%\bin\jbang.cmd" %* + exit /b %ERRORLEVEL% +) + +rem Find/get a JDK +set JAVA_EXEC= +if not "%JAVA_HOME%"=="" ( + rem Determine if a (working) JDK is available in JAVA_HOME + if exist "%JAVA_HOME%\bin\javac.exe" ( + set JAVA_EXEC="%JAVA_HOME%\bin\java.exe" + ) else ( + echo JAVA_HOME is set but does not seem to point to a valid Java JDK 1>&2 + ) +) +if "!JAVA_EXEC!"=="" ( + rem Determine if a (working) JDK is available on the PATH + where javac > nul 2>&1 + if !errorlevel! equ 0 ( + set JAVA_EXEC=java.exe + ) else if exist "%JBDIR%\currentjdk\bin\javac" ( + set JAVA_HOME=%JBDIR%\currentjdk + set JAVA_EXEC=%JBDIR%\currentjdk\bin\java + ) else ( + set JAVA_HOME=%TDIR%\jdks\%javaVersion% + set JAVA_EXEC=!JAVA_HOME!\bin\java.exe + rem Check if we installed a JDK before + if not exist "%TDIR%\jdks\%javaVersion%" ( + rem If not, download and install it + if not exist "%TDIR%\jdks" ( mkdir "%TDIR%\jdks" ) + echo Downloading JDK %javaVersion%. Be patient, this can take several minutes... 
1>&2 + powershell -NoProfile -ExecutionPolicy Bypass -NonInteractive -Command "$ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest %jdkurl% -OutFile %TDIR%\bootstrap-jdk.zip" + if !ERRORLEVEL! NEQ 0 ( echo Error downloading JDK 1>&2 & exit /b %ERRORLEVEL% ) + echo Installing JDK %javaVersion%... 1>&2 + if exist "%TDIR%\jdks\%javaVersion%.tmp" ( rd /s /q "%TDIR%\jdks\%javaVersion%.tmp" > nul 2>&1 ) + powershell -NoProfile -ExecutionPolicy Bypass -NonInteractive -Command "$ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path %TDIR%\bootstrap-jdk.zip -DestinationPath %TDIR%\jdks\%javaVersion%.tmp" + if !ERRORLEVEL! NEQ 0 ( echo Error installing JDK 1>&2 & exit /b %ERRORLEVEL% ) + for /d %%d in (%TDIR%\jdks\%javaVersion%.tmp\*) do ( + powershell -NoProfile -ExecutionPolicy Bypass -NonInteractive -Command "Move-Item %%d\* !TDIR!\jdks\%javaVersion%.tmp" + if !ERRORLEVEL! NEQ 0 ( echo Error installing JDK 1>&2 & exit /b %ERRORLEVEL% ) + ) + rem Check if the JDK was installed properly + %TDIR%\jdks\%javaVersion%.tmp\bin\javac -version > nul 2>&1 + if !ERRORLEVEL! NEQ 0 ( echo "Error installing JDK" 1>&2; exit /b %ERRORLEVEL% ) + rem Activate the downloaded JDK giving it its proper name + ren "%TDIR%\jdks\%javaVersion%.tmp" "%javaVersion%" + ) + rem Set the current JDK + !JAVA_EXEC! -classpath "%jarPath%" dev.jbang.Main jdk default "%javaVersion%" + ) +) + +if not exist "%TDIR%" ( mkdir "%TDIR%" ) +set tmpfile=%TDIR%\%RANDOM%.jbang.tmp +rem execute jbang and pipe to temporary random file +set JBANG_USES_POWERSHELL= +set "CMD=!JAVA_EXEC!" +SETLOCAL DISABLEDELAYEDEXPANSION +%CMD% > "%tmpfile%" %JBANG_JAVA_OPTIONS% -classpath "%jarPath%" dev.jbang.Main %* +set ERROR=%ERRORLEVEL% +rem catch errorlevel straight after; rem or FOR /F swallow would have swallowed the errorlevel + +if %ERROR% EQU 255 ( + rem read generated java command by jbang, delete temporary file and execute.
+ for %%A in ("%tmpfile%") do for /f "usebackq delims=" %%B in (%%A) do ( + set "OUTPUT=%%B" + goto :break + ) +:break + del /f /q "%tmpfile%" + %OUTPUT% + exit /b %ERRORLEVEL% +) else ( + type "%tmpfile%" + del /f /q "%tmpfile%" + exit /b %ERROR% +) diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/codestart.yml new file mode 100644 index 0000000000000..f74c6c6c37ef8 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus-jbang/tooling/jbang-wrapper/codestart.yml @@ -0,0 +1,5 @@ +name: jbang-wrapper +type: tooling +output-strategy: + "jbang": "executable" + "jbang.cmd": "executable" \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/..gitignore b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/..gitignore new file mode 100644 index 0000000000000..f318f00be30d3 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/..gitignore @@ -0,0 +1,3 @@ +# Gradle +.gradle/ +build/ diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/build-layout.include.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/build-layout.include.qute new file mode 100644 index 0000000000000..8fcd7516eba57 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/build-layout.include.qute @@ -0,0 +1,51 @@ +{#insert plugins} +plugins { + id("{quarkus.plugin.id}") +} +{/} + +{#insert repositories} +repositories { + mavenLocal() + mavenCentral() +{#if gradle.repositories} +{#for rep in gradle.repositories} + maven { url = uri("{rep.url}") } +{/for} +{/if} +} +{/} + +{#insert dependencies} +val quarkusPlatformGroupId: String by project +val quarkusPlatformArtifactId: String by project +val quarkusPlatformVersion: String by project + +dependencies { + implementation(enforcedPlatform("$\{quarkusPlatformGroupId}:$\{quarkusPlatformArtifactId}:$\{quarkusPlatformVersion}")) +{#for dep in dependencies} + implementation("{dep}") +{/for} + testImplementation("io.quarkus:quarkus-junit5") +{#for dep in test-dependencies} + testImplementation("{dep}") +{/for} +} +{/} + +{#insert project} +group = "{project.group-id}" +version = "{project.version}" +{/} + +{#insert java} +java { + {#if java.version == "11"} + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 + {#else} + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + {/if} +} +{/} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/gradle.tpl.qute.properties b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/gradle.tpl.qute.properties new file mode 100644 index 0000000000000..d3160667382c5 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/gradle.tpl.qute.properties @@ -0,0 +1,6 @@ +#Gradle properties +quarkusPluginId={quarkus.gradle-plugin.id} 
+quarkusPluginVersion={quarkus.gradle-plugin.version} +quarkusPlatformGroupId={quarkus.platform.group-id} +quarkusPlatformArtifactId={quarkus.platform.artifact-id} +quarkusPlatformVersion={quarkus.platform.version} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/settings.tpl.qute.gradle.kts b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/settings.tpl.qute.gradle.kts new file mode 100644 index 0000000000000..67eb7e52a3b19 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/base/settings.tpl.qute.gradle.kts @@ -0,0 +1,18 @@ +pluginManagement { + val quarkusPluginVersion: String by settings + val quarkusPluginId: String by settings + repositories { + mavenLocal() + mavenCentral() + gradlePluginPortal() +{#if gradle.plugin-repositories} +{#for rep in gradle.plugin-repositories} + maven { url = uri("{rep.url}") } +{/for} +{/if} + } + plugins { + id(quarkusPluginId) version quarkusPluginVersion + } +} +rootProject.name="{project.artifact-id}" diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/codestart.yml new file mode 100644 index 0000000000000..6a922b02e7cf1 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/codestart.yml @@ -0,0 +1,27 @@ +name: gradle-kotlin-dsl +type: buildtool +language: + base: + shared-data: + quarkus: + gradle-plugin: + id: io.quarkus + buildtool: + build-dir: build + cli: gradle + guide: https://quarkus.io/guides/gradle-tooling + guide-native: https://quarkus.io/guides/gradle-tooling#building-a-native-executable + cmd: + dev: quarkusDev + package: build + package-uber-jar: build -Dquarkus.package.type=uber-jar + package-legacy-jar: build -Dquarkus.package.type=legacy-jar + package-native: build -Dquarkus.package.type=native + package-native-container: build -Dquarkus.package.type=native -Dquarkus.native.container-build=true + build-ci: build + kotlin: + dependencies: + - org.jetbrains.kotlin:kotlin-stdlib-jdk8 + scala: + dependencies: + - org.scala-lang:scala-library:2.12.13 diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/java/build.tpl.qute.gradle.kts b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/java/build.tpl.qute.gradle.kts new file mode 100644 index 0000000000000..07df0d995bd6d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/java/build.tpl.qute.gradle.kts @@ -0,0 +1,13 @@ +{#include build-layout} +{#plugins} +plugins { + java + id("{quarkus.gradle-plugin.id}") +} +{/plugins} +{/include} + +tasks.withType<JavaCompile> { + options.encoding = "UTF-8" + options.compilerArgs.add("-parameters") +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/kotlin/build.tpl.qute.gradle.kts b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/kotlin/build.tpl.qute.gradle.kts new file mode 100644 index 0000000000000..a90e0705a1086 --- /dev/null +++
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/kotlin/build.tpl.qute.gradle.kts @@ -0,0 +1,24 @@ +{#include build-layout} +{#plugins} +plugins { + kotlin("jvm") version "{kotlin.version}" + kotlin("plugin.allopen") version "{kotlin.version}" + id("{quarkus.gradle-plugin.id}") +} +{/plugins} +{/include} + +allOpen { + annotation("javax.ws.rs.Path") + annotation("javax.enterprise.context.ApplicationScoped") + annotation("io.quarkus.test.junit.QuarkusTest") +} + +tasks.withType { + {#if java.version == "11"} + kotlinOptions.jvmTarget = JavaVersion.VERSION_11.toString() + {#else} + kotlinOptions.jvmTarget = JavaVersion.VERSION_1_8.toString() + {/if} + kotlinOptions.javaParameters = true +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/scala/build.tpl.qute.gradle.kts b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/scala/build.tpl.qute.gradle.kts new file mode 100644 index 0000000000000..a6c4baa92a1a3 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle-kotlin-dsl/scala/build.tpl.qute.gradle.kts @@ -0,0 +1,19 @@ +{#include build-layout} +{#plugins} +plugins { + scala + id("{quarkus.gradle-plugin.id}") +} +{/plugins} +{/include} + +tasks.withType { + scalaCompileOptions.encoding = "UTF-8" + {#if java.version == "11"} + sourceCompatibility = JavaVersion.VERSION_11.toString() + targetCompatibility = JavaVersion.VERSION_11.toString() + {#else} + sourceCompatibility = JavaVersion.VERSION_1_8.toString() + targetCompatibility = JavaVersion.VERSION_1_8.toString() + {/if} +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/..gitignore b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/..gitignore new file mode 100644 index 0000000000000..f318f00be30d3 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/..gitignore @@ -0,0 +1,3 @@ +# Gradle +.gradle/ +build/ diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/build-layout.include.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/build-layout.include.qute new file mode 100644 index 0000000000000..2331838993019 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/build-layout.include.qute @@ -0,0 +1,47 @@ +{#insert plugins} +plugins { + id '{quarkus.gradle-plugin.id}' +} +{/} + +{#insert repositories} +repositories { + mavenLocal() + mavenCentral() +{#if gradle.repositories} +{#for rep in gradle.repositories} + maven { url "{rep.url}" } +{/for} +{/if} +} +{/} + +{#insert dependencies} +dependencies { + implementation enforcedPlatform("$\{quarkusPlatformGroupId}:$\{quarkusPlatformArtifactId}:$\{quarkusPlatformVersion}") +{#for dep in dependencies} + implementation '{dep}' +{/for} + testImplementation 'io.quarkus:quarkus-junit5' +{#for dep in test-dependencies} + testImplementation '{dep}' +{/for} +} +{/} + +{#insert project} +group '{project.group-id}' +version '{project.version}' +{/} + +{#insert java} +java { + {#if java.version == "11"} + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 + {#else} + 
sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + {/if} +} +{/} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/gradle.tpl.qute.properties b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/gradle.tpl.qute.properties new file mode 100644 index 0000000000000..5d57e047ec255 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/gradle.tpl.qute.properties @@ -0,0 +1,8 @@ +#Gradle properties +quarkusPluginId={quarkus.gradle-plugin.id} +quarkusPluginVersion={quarkus.gradle-plugin.version} +quarkusPlatformGroupId={quarkus.platform.group-id} +quarkusPlatformArtifactId={quarkus.platform.artifact-id} +quarkusPlatformVersion={quarkus.platform.version} + +org.gradle.logging.level=INFO \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/settings.tpl.qute.gradle b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/settings.tpl.qute.gradle new file mode 100644 index 0000000000000..74e5936fa0038 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/base/settings.tpl.qute.gradle @@ -0,0 +1,16 @@ +pluginManagement { + repositories { + mavenLocal() + mavenCentral() + gradlePluginPortal() +{#if gradle.plugin-repositories} +{#for rep in gradle.plugin-repositories} + maven { url "{rep.url}" } +{/for} +{/if} + } + plugins { + id "$\{quarkusPluginId}" version "$\{quarkusPluginVersion}" + } +} +rootProject.name='{project.artifact-id}' diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/codestart.yml new file mode 100644 index 0000000000000..a0959b85c4253 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/codestart.yml @@ -0,0 +1,27 @@ +name: gradle +type: buildtool +language: + base: + shared-data: + quarkus: + gradle-plugin: + id: io.quarkus + buildtool: + build-dir: build + cli: gradle + guide: https://quarkus.io/guides/gradle-tooling + guide-native: https://quarkus.io/guides/gradle-tooling#building-a-native-executable + cmd: + dev: quarkusDev + package: build + package-uber-jar: build -Dquarkus.package.type=uber-jar + package-legacy-jar: build -Dquarkus.package.type=legacy-jar + package-native: build -Dquarkus.package.type=native + package-native-container: build -Dquarkus.package.type=native -Dquarkus.native.container-build=true + build-ci: build + kotlin: + dependencies: + - org.jetbrains.kotlin:kotlin-stdlib-jdk8 + scala: + dependencies: + - org.scala-lang:scala-library:2.12.13 diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/java/build.tpl.qute.gradle b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/java/build.tpl.qute.gradle new file mode 100644 index 0000000000000..2933d71345aa9 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/java/build.tpl.qute.gradle @@ -0,0 +1,17 @@ +{#include build-layout} +{#plugins} +plugins { + id 'java' + id '{quarkus.gradle-plugin.id}' +} +{/plugins} +{/include} + 
+compileJava { + options.encoding = 'UTF-8' + options.compilerArgs << '-parameters' +} + +compileTestJava { + options.encoding = 'UTF-8' +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/kotlin/build.tpl.qute.gradle b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/kotlin/build.tpl.qute.gradle new file mode 100644 index 0000000000000..687990707f197 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/kotlin/build.tpl.qute.gradle @@ -0,0 +1,32 @@ +{#include build-layout} +{#plugins} +plugins { + id 'org.jetbrains.kotlin.jvm' version "{kotlin.version}" + id "org.jetbrains.kotlin.plugin.allopen" version "{kotlin.version}" + id '{quarkus.gradle-plugin.id}' +} +{/plugins} +{/include} + +allOpen { + annotation("javax.ws.rs.Path") + annotation("javax.enterprise.context.ApplicationScoped") + annotation("io.quarkus.test.junit.QuarkusTest") +} + +compileKotlin { + {#if java.version == "11"} + kotlinOptions.jvmTarget = JavaVersion.VERSION_11 + {#else} + kotlinOptions.jvmTarget = JavaVersion.VERSION_1_8 + {/if} + kotlinOptions.javaParameters = true +} + +compileTestKotlin { + {#if java.version == "11"} + kotlinOptions.jvmTarget = JavaVersion.VERSION_11 + {#else} + kotlinOptions.jvmTarget = JavaVersion.VERSION_1_8 + {/if} +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/scala/build.tpl.qute.gradle b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/scala/build.tpl.qute.gradle new file mode 100644 index 0000000000000..6119eea5168af --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/gradle/scala/build.tpl.qute.gradle @@ -0,0 +1,19 @@ +{#include build-layout} +{#plugins} +plugins { + id 'scala' + id '{quarkus.gradle-plugin.id}' +} +{/plugins} +{/include} + +compileScala { + scalaCompileOptions.encoding = 'UTF-8' + {#if java.version == "11"} + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 + {#else} + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + {/if} +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/base/..gitignore b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/base/..gitignore new file mode 100644 index 0000000000000..1e4ecfdc2a7f0 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/base/..gitignore @@ -0,0 +1,6 @@ +#Maven +target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +release.properties diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/base/pom.tpl.qute.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/base/pom.tpl.qute.xml new file mode 100644 index 0000000000000..45b8902c61187 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/base/pom.tpl.qute.xml @@ -0,0 +1,175 @@ + + 4.0.0 + {project.group-id} + {project.artifact-id} + {project.version} + + + UTF-8 + UTF-8 + {java.version} + {java.version} + true + + {quarkus.platform.group-id} + {quarkus.platform.artifact-id} + {quarkus.platform.version} + {quarkus.maven-plugin.version} + 
{maven-compiler-plugin.version} + {maven-surefire-plugin.version} + {#if uberjar} + uber-jar + {/if} + + + + + + $\{quarkus.platform.group-id} + $\{quarkus.platform.artifact-id} + $\{quarkus.platform.version} + pom + import + + + + {#if maven.repositories} + + + {#for rep in maven.repositories} + + {rep.id} + {rep.url} + + {rep.releases-enabled ?: true} + + + {rep.snapshots-enabled ?: true} + + + {/for} + + {/if} + {#if maven.plugin-repositories} + + {#for rep in maven.plugin-repositories} + + {rep.id} + {rep.url} + + {rep.releases-enabled ?: true} + + + {rep.snapshots-enabled ?: true} + + + {/for} + + {/if} + + + {#each dependencies} + + {it.groupId} + {it.artifactId} + {#if it.version} + {it.version} + {/if} + + {/each} + + + + io.quarkus + quarkus-junit5 + test + + {#each test-dependencies} + + {it.groupId} + {it.artifactId} + {#if it.version} + {it.version} + {/if} + test + + {/each} + + + + + + + {quarkus.maven-plugin.group-id} + {quarkus.maven-plugin.artifact-id} + $\{quarkus-plugin.version} + true + + + + build + generate-code + generate-code-tests + + + + + + maven-compiler-plugin + $\{compiler-plugin.version} + + + + org.apache.maven.plugins + maven-surefire-plugin + $\{surefire-plugin.version} + + + org.jboss.logmanager.LogManager + $\{maven.home} + + + + + + + + + native + + + native + + + + native + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + $\{surefire-plugin.version} + + + + integration-test + verify + + + + $\{project.build.directory}/$\{project.build.finalName}-runner + org.jboss.logmanager.LogManager + $\{maven.home} + + + + + + + + + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/codestart.yml new file mode 100644 index 0000000000000..6078db1331f83 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/codestart.yml @@ -0,0 +1,19 @@ +name: maven +type: buildtool +fallback: true +language: + base: + shared-data: + buildtool: + build-dir: target + cli: mvn + guide: https://quarkus.io/guides/maven-tooling.html + guide-native: https://quarkus.io/guides/building-native-image + cmd: + dev: compile quarkus:dev + package: package + package-uber-jar: package -Dquarkus.package.type=uber-jar + package-legacy-jar: package -Dquarkus.package.type=legacy-jar + package-native: package -Pnative + package-native-container: package -Pnative -Dquarkus.native.container-build=true + build-ci: verify -B diff --git a/devtools/aesh/README.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/java/.gitkeep similarity index 100% rename from devtools/aesh/README.md rename to devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/java/.gitkeep diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/kotlin/pom.tpl.qute.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/kotlin/pom.tpl.qute.xml new file mode 100644 index 0000000000000..034755124e4b7 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/kotlin/pom.tpl.qute.xml @@ -0,0 +1,67 @@ + + + {kotlin.version} + + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + + + + io.rest-assured + kotlin-extensions + test + + + + src/main/kotlin + src/test/kotlin + + + + 
kotlin-maven-plugin + org.jetbrains.kotlin + $\{kotlin.version} + + + compile + + compile + + + + test-compile + + test-compile + + + + + true + 1.8 + + + all-open + + + + + + + + + + + + + org.jetbrains.kotlin + kotlin-maven-allopen + $\{kotlin.version} + + + + + + \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/scala/pom.tpl.qute.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/scala/pom.tpl.qute.xml new file mode 100644 index 0000000000000..a491ce465d382 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/buildtool/maven/scala/pom.tpl.qute.xml @@ -0,0 +1,62 @@ + + + {scala-maven-plugin.version} + {scala.version} + + + + io.quarkus + quarkus-scala + + + org.scala-lang + scala-library + $\{scala.version} + + + org.scala-lang + scala-reflect + $\{scala.version} + + + + src/main/scala + src/test/scala + + + net.alchim31.maven + scala-maven-plugin + $\{scala-maven-plugin.version} + + + scala-compile-first + process-resources + + add-source + compile + + + + scala-test-compile + process-test-resources + + add-source + testCompile + + + + + $\{scala.version} + + -deprecation + -feature + -explaintypes + -target:jvm-1.8 + -Ypartial-unification + + + + + + \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/config/properties/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/config/properties/codestart.yml new file mode 100644 index 0000000000000..0da257dbb35ea --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/config/properties/codestart.yml @@ -0,0 +1,9 @@ +name: config-properties +type: config +fallback: true +language: + base: + shared-data: + config: + guide: https://quarkus.io/guides/config + file-name: application.properties \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/config/yaml/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/config/yaml/codestart.yml new file mode 100644 index 0000000000000..cbdd9c9db0eb2 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/config/yaml/codestart.yml @@ -0,0 +1,10 @@ +name: config-yaml +type: config +language: + base: + shared-data: + config: + guide: https://quarkus.io/guides/config#yaml + file-name: application.yml + dependencies: + - io.quarkus:quarkus-config-yaml \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/codestart.yml new file mode 100644 index 0000000000000..9b0c1ece311d1 --- /dev/null +++ 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/codestart.yml @@ -0,0 +1,18 @@ +name: commandmode-example +ref: commandmode +type: code +tags: example +metadata: + title: Command Mode example + description: Go go commando with this command mode main class. +language: + base: + data: + main: + class-name: HelloCommando + package-name: org.acme + greeting: + message: "Hello" + default-name: "Commando" + dependencies: + - io.quarkus:quarkus-arc diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/java/src/main/java/org/acme/{main.class-name}.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/java/src/main/java/org/acme/{main.class-name}.tpl.qute.java new file mode 100644 index 0000000000000..dfc21d94a5acc --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/java/src/main/java/org/acme/{main.class-name}.tpl.qute.java @@ -0,0 +1,19 @@ +package org.acme; + +import javax.enterprise.context.control.ActivateRequestContext; +import javax.inject.Inject; + +import io.quarkus.runtime.Quarkus; +import io.quarkus.runtime.QuarkusApplication; +import io.quarkus.runtime.annotations.QuarkusMain; + +@QuarkusMain +public class {main.class-name} implements QuarkusApplication { + + @Override + public int run(String... args) throws Exception { + final String name = args.length > 0 ? String.join(" ", args) : "{greeting.default-name}"; + System.out.println("{greeting.message} " + name); + return 0; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/kotlin/src/main/kotlin/org/acme/{main.class-name}.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/kotlin/src/main/kotlin/org/acme/{main.class-name}.tpl.qute.kt new file mode 100644 index 0000000000000..8b326610e49ed --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/commandmode-example/kotlin/src/main/kotlin/org/acme/{main.class-name}.tpl.qute.kt @@ -0,0 +1,17 @@ +package org.acme + +import javax.inject.Inject + +import io.quarkus.runtime.QuarkusApplication +import io.quarkus.runtime.annotations.QuarkusMain + +@QuarkusMain +class {main.class-name}: QuarkusApplication { + + override fun run(vararg args: String?): Int { + val name = if (args.isNotEmpty()) args.joinToString(",") else "{greeting.default-name}" + println(name) + return 0 + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/base/src/main/resources/META-INF/resources/index.entry.qute.html 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..c06aeeb1b4634 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry path=resource.path/} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/codestart.yml new file mode 100644 index 0000000000000..585b0f88623f5 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/codestart.yml @@ -0,0 +1,19 @@ +name: resteasy-example +ref: resteasy +type: code +tags: example +metadata: + title: RESTEasy JAX-RS example + description: REST is easy peasy with this Hello World RESTEasy resource. + related-guide-section: https://quarkus.io/guides/getting-started#the-jax-rs-resources +language: + base: + data: + resource: + class-name: GreetingResource + path: "/hello-resteasy" + response: "Hello RESTEasy" + dependencies: + - io.quarkus:quarkus-resteasy + test-dependencies: + - io.rest-assured:rest-assured diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java new file mode 100644 index 0000000000000..769ac7eccf80a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java @@ -0,0 +1,16 @@ +package org.acme; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +@Path("{resource.path}") +public class {resource.class-name} { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "{resource.response}"; + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java new file mode 100644 index 0000000000000..9462e8558f054 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java @@ -0,0 +1,9 @@ +package org.acme; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class Native{resource.class-name}IT extends {resource.class-name}Test { + + // Execute the same tests but in native mode. 
+} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java new file mode 100644 index 0000000000000..aca41d9528af0 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java @@ -0,0 +1,21 @@ +package org.acme; + +import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; + +@QuarkusTest +public class {resource.class-name}Test { + + @Test + public void testHelloEndpoint() { + given() + .when().get("{resource.path}") + .then() + .statusCode(200) + .body(is("{resource.response}")); + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt new file mode 100644 index 0000000000000..6cf3f82e70791 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt @@ -0,0 +1,14 @@ +package org.acme + +import javax.ws.rs.GET +import javax.ws.rs.Path +import javax.ws.rs.Produces +import javax.ws.rs.core.MediaType + +@Path("{resource.path}") +class {resource.class-name} { + + @GET + @Produces(MediaType.TEXT_PLAIN) + fun hello() = "{resource.response}" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt new file mode 100644 index 0000000000000..49b2a833857a5 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt @@ -0,0 +1,6 @@ +package org.acme + +import io.quarkus.test.junit.NativeImageTest + +@NativeImageTest +class Native{resource.class-name}IT : {resource.class-name}Test() \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt new file mode 100644 index 0000000000000..1e11d2c0caa30 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt @@ -0,0 +1,20 @@ +package org.acme + +import io.quarkus.test.junit.QuarkusTest +import 
io.restassured.RestAssured.given +import org.hamcrest.CoreMatchers.`is` +import org.junit.jupiter.api.Test + +@QuarkusTest +class {resource.class-name}Test { + + @Test + fun testHelloEndpoint() { + given() + .`when`().get("{resource.path}") + .then() + .statusCode(200) + .body(`is`("{resource.response}")) + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala new file mode 100644 index 0000000000000..9c1d477333e38 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala @@ -0,0 +1,12 @@ +package org.acme + +import javax.ws.rs.\{GET, Path, Produces} +import javax.ws.rs.core.MediaType + +@Path("{resource.path}") +class {resource.class-name} { + + @GET + @Produces(Array[String](MediaType.TEXT_PLAIN)) + def hello() = "{resource.response}" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala new file mode 100644 index 0000000000000..cdcb18d028a16 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala @@ -0,0 +1,6 @@ +package org.acme + +import io.quarkus.test.junit.NativeImageTest + +@NativeImageTest +class Native{resource.class-name}IT extends {resource.class-name}Test \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala new file mode 100644 index 0000000000000..07681f8df9c8b --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala @@ -0,0 +1,20 @@ +package org.acme + +import io.quarkus.test.junit.QuarkusTest +import io.restassured.RestAssured.given +import org.hamcrest.CoreMatchers.`is` +import org.junit.jupiter.api.Test + +@QuarkusTest +class {resource.class-name}Test { + + @Test + def testHelloEndpoint() = { + given() + .`when`().get("{resource.path}") + .then() + .statusCode(200) + .body(`is`("{resource.response}")) + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..c06aeeb1b4634 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry path=resource.path/} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/codestart.yml new file mode 100644 index 0000000000000..163a7d1048b9c --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/codestart.yml @@ -0,0 +1,19 @@ +name: resteasy-reactive-example +ref: resteasy-reactive +type: code +tags: example +metadata: + title: RESTEasy Reactive example + description: Rest is easy peasy & reactive with this Hello World RESTEasy Reactive resource. + related-guide-section: https://quarkus.io/guides/getting-started-reactive#reactive-jax-rs-resources +language: + base: + data: + resource: + class-name: ReactiveGreetingResource + path: "/hello-resteasy-reactive" + response: "Hello RESTEasy Reactive" + dependencies: + - io.quarkus:quarkus-resteasy-reactive + test-dependencies: + - io.rest-assured:rest-assured diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java new file mode 100644 index 0000000000000..769ac7eccf80a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java @@ -0,0 +1,16 @@ +package org.acme; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +@Path("{resource.path}") +public class {resource.class-name} { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "{resource.response}"; + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java new file mode 100644 index 0000000000000..9462e8558f054 --- /dev/null +++ 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java @@ -0,0 +1,9 @@ +package org.acme; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class Native{resource.class-name}IT extends {resource.class-name}Test { + + // Execute the same tests but in native mode. +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java new file mode 100644 index 0000000000000..aca41d9528af0 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java @@ -0,0 +1,21 @@ +package org.acme; + +import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; + +@QuarkusTest +public class {resource.class-name}Test { + + @Test + public void testHelloEndpoint() { + given() + .when().get("{resource.path}") + .then() + .statusCode(200) + .body(is("{resource.response}")); + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt new file mode 100644 index 0000000000000..6cf3f82e70791 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt @@ -0,0 +1,14 @@ +package org.acme + +import javax.ws.rs.GET +import javax.ws.rs.Path +import javax.ws.rs.Produces +import javax.ws.rs.core.MediaType + +@Path("{resource.path}") +class {resource.class-name} { + + @GET + @Produces(MediaType.TEXT_PLAIN) + fun hello() = "{resource.response}" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt new file mode 100644 index 0000000000000..49b2a833857a5 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt @@ -0,0 +1,6 @@ +package org.acme + +import io.quarkus.test.junit.NativeImageTest + +@NativeImageTest +class Native{resource.class-name}IT : {resource.class-name}Test() \ No newline at end of file diff --git 
a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt new file mode 100644 index 0000000000000..1e11d2c0caa30 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt @@ -0,0 +1,20 @@ +package org.acme + +import io.quarkus.test.junit.QuarkusTest +import io.restassured.RestAssured.given +import org.hamcrest.CoreMatchers.`is` +import org.junit.jupiter.api.Test + +@QuarkusTest +class {resource.class-name}Test { + + @Test + fun testHelloEndpoint() { + given() + .`when`().get("{resource.path}") + .then() + .statusCode(200) + .body(`is`("{resource.response}")) + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala new file mode 100644 index 0000000000000..9c1d477333e38 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala @@ -0,0 +1,12 @@ +package org.acme + +import javax.ws.rs.\{GET, Path, Produces} +import javax.ws.rs.core.MediaType + +@Path("{resource.path}") +class {resource.class-name} { + + @GET + @Produces(Array[String](MediaType.TEXT_PLAIN)) + def hello() = "{resource.response}" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala new file mode 100644 index 0000000000000..cdcb18d028a16 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala @@ -0,0 +1,6 @@ +package org.acme + +import io.quarkus.test.junit.NativeImageTest + +@NativeImageTest +class Native{resource.class-name}IT extends {resource.class-name}Test \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala new file mode 100644 index 0000000000000..07681f8df9c8b --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/resteasy-reactive-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala @@ -0,0 +1,20 @@ +package org.acme + +import 
io.quarkus.test.junit.QuarkusTest +import io.restassured.RestAssured.given +import org.hamcrest.CoreMatchers.`is` +import org.junit.jupiter.api.Test + +@QuarkusTest +class {resource.class-name}Test { + + @Test + def testHelloEndpoint() = { + given() + .`when`().get("{resource.path}") + .then() + .statusCode(200) + .body(`is`("{resource.response}")) + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..c06aeeb1b4634 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry path=resource.path/} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/codestart.yml new file mode 100644 index 0000000000000..a4c51fa3e6ba8 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/codestart.yml @@ -0,0 +1,20 @@ +name: spring-web-example +ref: spring-web +type: code +tags: example +metadata: + title: Spring Web example + description: Spring, the Quarkus way! A Hello World Spring Web Controller. 
+ related-guide-section: https://quarkus.io/guides/spring-web#greetingcontroller +language: + base: + data: + resource: + class-name: SpringGreetingController + path: "/hello-spring" + response: "Hello Spring" + package-name: org.acme + dependencies: + - io.quarkus:quarkus-spring-web + test-dependencies: + - io.rest-assured:rest-assured diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java new file mode 100644 index 0000000000000..7af3fc242b976 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/main/java/org/acme/{resource.class-name}.tpl.qute.java @@ -0,0 +1,15 @@ +package org.acme; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequestMapping("{resource.path}") +public class {resource.class-name} { + + @GetMapping + public String hello() { + return "{resource.response}"; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java new file mode 100644 index 0000000000000..9462e8558f054 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/native-test/java/org/acme/Native{resource.class-name}IT.tpl.qute.java @@ -0,0 +1,9 @@ +package org.acme; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class Native{resource.class-name}IT extends {resource.class-name}Test { + + // Execute the same tests but in native mode. 
+} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java new file mode 100644 index 0000000000000..aca41d9528af0 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/java/src/test/java/org/acme/{resource.class-name}Test.tpl.qute.java @@ -0,0 +1,21 @@ +package org.acme; + +import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; + +@QuarkusTest +public class {resource.class-name}Test { + + @Test + public void testHelloEndpoint() { + given() + .when().get("{resource.path}") + .then() + .statusCode(200) + .body(is("{resource.response}")); + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt new file mode 100644 index 0000000000000..20ca8ba781287 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/main/kotlin/org/acme/{resource.class-name}.tpl.qute.kt @@ -0,0 +1,14 @@ +package org.acme + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + + +@RestController +@RequestMapping("{resource.path}") +class {resource.class-name} { + + @GetMapping + fun hello() = "{resource.response}" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt new file mode 100644 index 0000000000000..49b2a833857a5 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/native-test/kotlin/org/acme/Native{resource.class-name}IT.tpl.qute.kt @@ -0,0 +1,6 @@ +package org.acme + +import io.quarkus.test.junit.NativeImageTest + +@NativeImageTest +class Native{resource.class-name}IT : {resource.class-name}Test() \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt new file mode 100644 index 0000000000000..1e11d2c0caa30 --- /dev/null +++ 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/kotlin/src/test/kotlin/org/acme/{resource.class-name}Test.tpl.qute.kt @@ -0,0 +1,20 @@ +package org.acme + +import io.quarkus.test.junit.QuarkusTest +import io.restassured.RestAssured.given +import org.hamcrest.CoreMatchers.`is` +import org.junit.jupiter.api.Test + +@QuarkusTest +class {resource.class-name}Test { + + @Test + fun testHelloEndpoint() { + given() + .`when`().get("{resource.path}") + .then() + .statusCode(200) + .body(`is`("{resource.response}")) + } + +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala new file mode 100644 index 0000000000000..b72ae46ba7e5d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/main/scala/org/acme/{resource.class-name}.tpl.qute.scala @@ -0,0 +1,12 @@ +package org.acme; + +import org.springframework.web.bind.annotation.\{GetMapping, RequestMapping, RestController} + + +@RestController +@RequestMapping(Array[String]("{resource.path}")) +class {resource.class-name} { + + @GetMapping + def hello() = "{resource.response}" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala new file mode 100644 index 0000000000000..cdcb18d028a16 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/native-test/scala/org/acme/Native{resource.class-name}IT.tpl.qute.scala @@ -0,0 +1,6 @@ +package org.acme + +import io.quarkus.test.junit.NativeImageTest + +@NativeImageTest +class Native{resource.class-name}IT extends {resource.class-name}Test \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala new file mode 100644 index 0000000000000..07681f8df9c8b --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/examples/spring-web-example/scala/src/test/scala/org/acme/{resource.class-name}Test.tpl.qute.scala @@ -0,0 +1,20 @@ +package org.acme + +import io.quarkus.test.junit.QuarkusTest +import io.restassured.RestAssured.given +import org.hamcrest.CoreMatchers.`is` +import org.junit.jupiter.api.Test + +@QuarkusTest +class {resource.class-name}Test { + + @Test + def testHelloEndpoint() = { + given() + .`when`().get("{resource.path}") + .then() + .statusCode(200) + .body(`is`("{resource.response}")) + } + +} \ No newline at end of file diff --git 
a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/java/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/java/codestart.yml new file mode 100644 index 0000000000000..2846850c7f356 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/java/codestart.yml @@ -0,0 +1,9 @@ +name: java +type: language +fallback: true +language: + base: + shared-data: + language: + dir: + code: src/main/java \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/kotlin/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/kotlin/codestart.yml new file mode 100644 index 0000000000000..fa9d59b1dde5c --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/kotlin/codestart.yml @@ -0,0 +1,10 @@ +name: kotlin +type: language +language: + base: + shared-data: + language: + dir: + code: src/main/kotlin + dependencies: + io.quarkus:quarkus-kotlin \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/scala/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/scala/codestart.yml new file mode 100644 index 0000000000000..86cd415f7d5d8 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/language/scala/codestart.yml @@ -0,0 +1,10 @@ +name: scala +type: language +language: + base: + shared-data: + language: + dir: + code: src/main/scala + dependencies: + io.quarkus:quarkus-scala \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/..gitignore b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/..gitignore new file mode 100644 index 0000000000000..3cf482a1da8dc --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/..gitignore @@ -0,0 +1,32 @@ +# Eclipse +.project +.classpath +.settings/ +bin/ + +# IntelliJ +.idea +*.ipr +*.iml +*.iws + +# NetBeans +nb-configuration.xml + +# Visual Studio Code +.vscode +.factorypath + +# OSX +.DS_Store + +# Vim +*.swp +*.swo + +# patch +*.orig +*.rej + +# Local environment +.env diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/README.tpl.qute.md new file mode 100644 index 0000000000000..202524bba241a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/README.tpl.qute.md @@ -0,0 +1,61 @@ +# {project.artifact-id} project + +This project uses Quarkus, the Supersonic Subatomic Java Framework. + +If you want to learn more about Quarkus, please visit its website: https://quarkus.io/ . + +## Running the application in dev mode + +You can run your application in dev mode that enables live coding using: +```shell script +{buildtool.cli} {buildtool.cmd.dev} +``` + +> **_NOTE:_** Quarkus now ships with a Dev UI, which is available in dev mode only at http://localhost:8080/q/dev/. 
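As a rough illustration (assuming the `buildtool` data declared by the `maven` and `gradle` codestarts earlier in this diff: `cli: mvn` with `dev: compile quarkus:dev`, and `cli: gradle` with `dev: quarkusDev`), the `{buildtool.cli} {buildtool.cmd.dev}` placeholder above would render to something like:

```shell script
# Maven-based project generated by the maven codestart (cli: mvn, cmd.dev: compile quarkus:dev)
mvn compile quarkus:dev

# Gradle-based project generated by the gradle codestart (cli: gradle, cmd.dev: quarkusDev)
gradle quarkusDev
```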
+ +## Packaging and running the application + +The application can be packaged using: +```shell script +{buildtool.cli} {buildtool.cmd.package} +``` +It produces the `quarkus-run.jar` file in the `{buildtool.build-dir}/quarkus-app/` directory. +Be aware that it’s not an _über-jar_ as the dependencies are copied into the `{buildtool.build-dir}/quarkus-app/lib/` directory. + +If you want to build an _über-jar_, execute the following command: +```shell script +{buildtool.cli} {buildtool.cmd.package-uber-jar} +``` + +The application is now runnable using `java -jar {buildtool.build-dir}/quarkus-app/quarkus-run.jar`. + +## Creating a native executable + +You can create a native executable using: +```shell script +{buildtool.cli} {buildtool.cmd.package-native} +``` + +Or, if you don't have GraalVM installed, you can run the native executable build in a container using: +```shell script +{buildtool.cli} {buildtool.cmd.package-native-container} +``` + +You can then execute your native executable with: `./{buildtool.build-dir}/{project.artifact-id}-{project.version}-runner` + +If you want to learn more about building native executables, please consult {buildtool.guide}. + +{#if input.selected-extensions} +## Related guides + +{#for ext in input.selected-extensions} +{#if ext.guide} +- {ext.name} ([guide]({ext.guide})): {ext.description} +{/if} +{/for} + +{/if} +{#if input.selected-examples} +## Provided examples +{/if} + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/index-entry.include.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/index-entry.include.qute new file mode 100644 index 0000000000000..c11ee97da52d2 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/index-entry.include.qute @@ -0,0 +1,7 @@ +
[index-entry.include.qute - HTML fragment (markup omitted): renders {title} and {description}, an optional "@Path: {path}" line inside {#if path}...{/if}, an optional "Related guide section..." link inside {#if related-guide-section}...{/if}, and an {#insert body}{/} slot]
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/readme-header.include.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/readme-header.include.qute new file mode 100644 index 0000000000000..0b0f1d29bdb7d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/readme-header.include.qute @@ -0,0 +1,7 @@ +### {title} + +{description} +{#if related-guide-section} + +[Related guide section...]({related-guide-section}) +{/if}
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/src/main/resources/META-INF/resources/index.tpl.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/src/main/resources/META-INF/resources/index.tpl.qute.html new file mode 100644 index 0000000000000..7106cecab53f6 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/src/main/resources/META-INF/resources/index.tpl.qute.html @@ -0,0 +1,176 @@
[index.tpl.qute.html - HTML welcome page (markup omitted); recoverable content: page title "{project.artifact-id} - {project.version}"; "Congratulations, you have created a new Quarkus cloud application."; "What is this page?" - this page is served by Quarkus, the source is in src/main/resources/META-INF/resources/index.html; "What are your next steps?" - if not already done, run the application in dev mode using {buildtool.cli} {buildtool.cmd.dev}; your static assets are located in src/main/resources/META-INF/resources; configure your application in src/main/resources/{config.file-name}; Quarkus now ships with a Dev UI (available in dev mode only); play with the getting started example code located in {language.dir.code}; {merged-content}; "Application" - GroupId: {project.group-id}, ArtifactId: {project.artifact-id}, Version: {project.version}, Quarkus Version: {quarkus.version}; "Do you like Quarkus?" - go give it a star on GitHub; {#if input.selected-extensions} "Selected extensions guides" - {#for ext in input.selected-extensions}{#if ext.guide} {ext.name} guide {/if}{/for} {/if}] \ No newline at end of file
diff --git a/devtools/gradle/src/test/resources/gradle-project/settings.gradle b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/src/main/resources/application.yml similarity index 100% rename from devtools/gradle/src/test/resources/gradle-project/settings.gradle rename to devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/base/src/main/resources/application.yml
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/codestart.yml new file mode 100644 index 0000000000000..43f1b95f233c4 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/codestart.yml @@ -0,0 +1,28 @@ +name: quarkus +type: project +fallback: true +output-strategy: + "pom.xml": smart-pom-merge + "README.md": append + "readme.md": forbidden + "README.adoc": forbidden + "readme.adoc": forbidden + ".gitignore": append + "src/main/resources/META-INF/resources/index.html": content-merge + "src/main/resources/application.yml": smart-config-merge + "src/main/resources/application.properties": forbidden + "src/test/resources/application.yml": smart-config-merge + "src/test/resources/application.properties": forbidden + "*.java": smart-package + "*.kt": smart-package + "*.scala": smart-package + "*": fail-on-duplicate +language: + base: + shared-data: + project: + group-id: org.acme + artifact-id: quarkus-project + version: 1.0.0-SNAPSHOT + dependencies: + - io.quarkus:quarkus-arc
diff --git a/extensions/vault/runtime/src/main/resources/META-INF/beans.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/java/src/main/java/.gitkeep similarity index 100% rename from extensions/vault/runtime/src/main/resources/META-INF/beans.xml rename to devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/java/src/main/java/.gitkeep
diff --git a/integration-tests/hibernate-search-elasticsearch/src/main/resources/META-INF/beans.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/kotlin/src/main/kotlin/.gitkeep similarity index 100% rename from integration-tests/hibernate-search-elasticsearch/src/main/resources/META-INF/beans.xml rename to devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/kotlin/src/main/kotlin/.gitkeep
diff --git a/integration-tests/jpa-postgresql/src/main/resources/META-INF/beans.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/scala/src/main/scala/.gitkeep similarity index 100% rename from integration-tests/jpa-postgresql/src/main/resources/META-INF/beans.xml rename to devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/project/quarkus/scala/src/main/scala/.gitkeep
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/.dockerignore.tpl.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/.dockerignore.tpl.qute new file mode 100644 index 0000000000000..cbc0ab2aa6fa6 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/.dockerignore.tpl.qute @@ -0,0 +1,5 @@ +*
+!{buildtool.build-dir}/*-runner +!{buildtool.build-dir}/*-runner.jar +!{buildtool.build-dir}/lib/* +!{buildtool.build-dir}/quarkus-app/* \ No newline at end of file
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/Dockerfile-layout.include.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/Dockerfile-layout.include.qute new file mode 100644 index 0000000000000..9cd67f2754a56 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/Dockerfile-layout.include.qute @@ -0,0 +1,48 @@ +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode +# +# Before building the container image run: +# +# {#insert quarkusbuild /} +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.{type} -t quarkus/{project.artifact-id}-{type} . +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/{project.artifact-id}-{type} +# +# If you want to include the debug port into your docker image +# you will have to expose the debug port (default 5005) like this: EXPOSE 8080 5005 +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/{project.artifact-id}-{type} +# +### +FROM {#insert image /} +{#insert args /} +ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' +# Install java and the run-java script +# Also set up permissions for user `1001` +RUN microdnf install curl ca-certificates $\{JAVA_PACKAGE} \ + && microdnf update \ + && microdnf clean all \ + && mkdir /deployments \ + && chown 1001 /deployments \ + && chmod "g+rwX" /deployments \ + && chown 1001:root /deployments \ + && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/$\{RUN_JAVA_VERSION}/run-java-sh-$\{RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \ + && chown 1001 /deployments/run-java.sh \ + && chmod 540 /deployments/run-java.sh \ + && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/lib/security/java.security + +# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
+ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" +{#insert copy /} + +EXPOSE 8080 +USER 1001 + +ENTRYPOINT [ "/deployments/run-java.sh" ] diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.jvm b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.jvm new file mode 100644 index 0000000000000..7dd930589ab34 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.jvm @@ -0,0 +1,16 @@ +{#include Dockerfile-layout type='jvm'} + {#quarkusbuild}{buildtool.cli} {buildtool.cmd.package}{/quarkusbuild} + {#image}{dockerfile.jvm.from} {/image} + {#args} +ARG JAVA_PACKAGE={dockerfile.jvm.java-package} +ARG RUN_JAVA_VERSION={dockerfile.jvm.run-java-version} + {/args} + {#copy} +# We make four distinct layers so if there are application changes the library layers can be re-used +COPY --chown=1001 {buildtool.build-dir}/quarkus-app/lib/ /deployments/lib/ +COPY --chown=1001 {buildtool.build-dir}/quarkus-app/*.jar /deployments/ +COPY --chown=1001 {buildtool.build-dir}/quarkus-app/app/ /deployments/app/ +COPY --chown=1001 {buildtool.build-dir}/quarkus-app/quarkus/ /deployments/quarkus/ + {/copy} +{/include} + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.legacy-jar b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.legacy-jar new file mode 100644 index 0000000000000..4f30fe8d7fa14 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.legacy-jar @@ -0,0 +1,12 @@ +{#include Dockerfile-layout type='legacy-jar'} + {#quarkusbuild}{buildtool.cli} {buildtool.cmd.package-legacy-jar}{/quarkusbuild} + {#image}{dockerfile.legacy-jar.from} {/image} + {#args} +ARG JAVA_PACKAGE={dockerfile.legacy-jar.java-package} +ARG RUN_JAVA_VERSION={dockerfile.legacy-jar.run-java-version} + {/args} + {#copy} +COPY {buildtool.build-dir}/lib/* /deployments/lib/ +COPY {buildtool.build-dir}/*-runner.jar /deployments/app.jar + {/copy} +{/include} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.native b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.native new file mode 100644 index 0000000000000..7e2697acbb008 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.native @@ -0,0 +1,27 @@ +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode +# +# Before building the container image run: +# +# {buildtool.cli} {buildtool.cmd.package-native} +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.native -t quarkus/{project.artifact-id} . 
+# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/{project.artifact-id} +# +### +FROM {dockerfile.native.from} +WORKDIR /work/ +RUN chown 1001 /work \ + && chmod "g+rwX" /work \ + && chown 1001:root /work +COPY --chown=1001:root {buildtool.build-dir}/*-runner /work/application + +EXPOSE 8080 +USER 1001 + +CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.native-distroless b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.native-distroless new file mode 100644 index 0000000000000..768451a3c7809 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/base/src/main/docker/Dockerfile.tpl.qute.native-distroless @@ -0,0 +1,23 @@ +#### +# This Dockerfile is used in order to build a distroless container that runs the Quarkus application in native (no JVM) mode +# +# Before building the container image run: +# +# {buildtool.cli} {buildtool.cmd.package-native} +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.native-distroless -t quarkus/{project.artifact-id} . +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/{project.artifact-id} +# +### +FROM {dockerfile.native-distroless.from} +COPY {buildtool.build-dir}/*-runner /application + +EXPOSE 8080 +USER nonroot + +CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/codestart.yml new file mode 100644 index 0000000000000..3a3d6f97845a4 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/dockerfiles/codestart.yml @@ -0,0 +1,18 @@ +name: dockerfiles +type: tooling +language: + base: + data: + dockerfile: + jvm: + from: registry.access.redhat.com/ubi8/ubi-minimal:8.3 + java-package: java-11-openjdk-headless + run-java-version: 1.3.8 + legacy-jar: + from: registry.access.redhat.com/ubi8/ubi-minimal:8.3 + java-package: java-11-openjdk-headless + run-java-version: 1.3.8 + native: + from: registry.access.redhat.com/ubi8/ubi-minimal:8.3 + native-distroless: + from: quay.io/quarkus/quarkus-distroless-image:1.0 diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/github-action/base/.github/workflows/ci.tpl.qute.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/github-action/base/.github/workflows/ci.tpl.qute.yml new file mode 100644 index 0000000000000..59956fe437203 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/github-action/base/.github/workflows/ci.tpl.qute.yml @@ -0,0 +1,27 @@ +## This is basic continuous integration build for your Quarkus application. 
+ +name: Quarkus Codestart CI + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up JDK {java.version} + uses: actions/setup-java@v1 + with: + java-version: {java.version} + - name: Build + {#if buildtool.cli == 'gradle'} + uses: eskatos/gradle-command-action@v1 + with: + arguments: {buildtool.cmd.build-ci} + {#else} + run: {buildtool.cli} {buildtool.cmd.build-ci} + {/if} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/github-action/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/github-action/codestart.yml new file mode 100644 index 0000000000000..e933401728f61 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/github-action/codestart.yml @@ -0,0 +1,2 @@ +name: github-action +type: tooling diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradle/wrapper/gradle-wrapper.jar b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000..62d4c053550b9 Binary files /dev/null and b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradle/wrapper/gradle-wrapper.jar differ diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradle/wrapper/gradle-wrapper.tpl.qute.properties b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradle/wrapper/gradle-wrapper.tpl.qute.properties new file mode 100644 index 0000000000000..41f6d9639814a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradle/wrapper/gradle-wrapper.tpl.qute.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-{gradle.version}-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradlew b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradlew new file mode 100644 index 0000000000000..fbd7c515832da --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradlew @@ -0,0 +1,185 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=`expr $i + 1` + done + case $i in + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=`save "$@"` + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +exec "$JAVACMD" "$@" diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradlew.bat b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradlew.bat new file mode 100755 index 0000000000000..a9f778a7a964b --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/base/gradlew.bat @@ -0,0 +1,104 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/codestart.yml new file mode 100644 index 0000000000000..2caf1d448717d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/gradle-wrapper/codestart.yml @@ -0,0 +1,13 @@ +name: gradle-wrapper +type: tooling +output-strategy: + "gradlew": "executable" + "gradlew.bat": "executable" +language: + base: + data: + gradle: + version: 6.8.3 + shared-data: + buildtool: + cli: ./gradlew diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/.mvn/wrapper/MavenWrapperDownloader.tpl.qute.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/.mvn/wrapper/MavenWrapperDownloader.tpl.qute.java new file mode 100644 index 0000000000000..2e3f96e7dcdaf --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/.mvn/wrapper/MavenWrapperDownloader.tpl.qute.java @@ -0,0 +1,117 @@ +/* + * Copyright 2007-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import java.net.*; +import java.io.*; +import java.nio.channels.*; +import java.util.Properties; + +public class MavenWrapperDownloader { + + private static final String WRAPPER_VERSION = "{maven.wrapper-version}"; + /** + * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. + */ + private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" + + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; + + /** + * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to + * use instead of the default one. + */ + private static final String MAVEN_WRAPPER_PROPERTIES_PATH = + ".mvn/wrapper/maven-wrapper.properties"; + + /** + * Path where the maven-wrapper.jar will be saved to. + */ + private static final String MAVEN_WRAPPER_JAR_PATH = + ".mvn/wrapper/maven-wrapper.jar"; + + /** + * Name of the property which should be used to override the default download url for the wrapper. + */ + private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; + + public static void main(String args[]) { + System.out.println("- Downloader started"); + File baseDirectory = new File(args[0]); + System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); + + // If the maven-wrapper.properties exists, read it and check if it contains a custom + // wrapperUrl parameter. 
+ File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); + String url = DEFAULT_DOWNLOAD_URL; + if(mavenWrapperPropertyFile.exists()) { + FileInputStream mavenWrapperPropertyFileInputStream = null; + try { + mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); + Properties mavenWrapperProperties = new Properties(); + mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); + url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); + } catch (IOException e) { + System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); + } finally { + try { + if(mavenWrapperPropertyFileInputStream != null) { + mavenWrapperPropertyFileInputStream.close(); + } + } catch (IOException e) { + // Ignore ... + } + } + } + System.out.println("- Downloading from: " + url); + + File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); + if(!outputFile.getParentFile().exists()) { + if(!outputFile.getParentFile().mkdirs()) { + System.out.println( + "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); + } + } + System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); + try { + downloadFileFromURL(url, outputFile); + System.out.println("Done"); + System.exit(0); + } catch (Throwable e) { + System.out.println("- Error downloading"); + e.printStackTrace(); + System.exit(1); + } + } + + private static void downloadFileFromURL(String urlString, File destination) throws Exception { + if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { + String username = System.getenv("MVNW_USERNAME"); + char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication(username, password); + } + }); + } + URL website = new URL(urlString); + ReadableByteChannel rbc; + rbc = Channels.newChannel(website.openStream()); + FileOutputStream fos = new FileOutputStream(destination); + fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); + fos.close(); + rbc.close(); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/.mvn/wrapper/maven-wrapper.tpl.qute.properties b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/.mvn/wrapper/maven-wrapper.tpl.qute.properties new file mode 100644 index 0000000000000..bb5fb3b4e3ead --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/.mvn/wrapper/maven-wrapper.tpl.qute.properties @@ -0,0 +1,2 @@ +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/{maven.version}/apache-maven-{maven.version}-bin.zip +wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/{maven.wrapper-version}/maven-wrapper-{maven.wrapper-version}.jar diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/mvnw.tpl.qute b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/mvnw.tpl.qute new file mode 100755 index 0000000000000..8f133b17bbeb7 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/mvnw.tpl.qute @@ -0,0 +1,310 @@ +#!/bin/sh +# 
---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! 
[ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "$\{wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "$\{basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + if [ -n "$MVNW_REPOURL" ]; then + jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/{maven.wrapper-version}/maven-wrapper-{maven.wrapper-version}.jar" + else + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/{maven.wrapper-version}/maven-wrapper-{maven.wrapper-version}.jar" + fi + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + if $cygwin; then + wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` + fi + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... 
using wget" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + wget "$jarUrl" -O "$wrapperJarPath" + else + wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" + fi + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + curl -o "$wrapperJarPath" "$jarUrl" -f + else + curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f + fi + + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + # For Cygwin, switch paths to Windows format before running javac + if $cygwin; then + javaClass=`cygpath --path --windows "$javaClass"` + fi + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=$\{MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +# Provide a "standardized" way to retrieve the CLI args that will +# work with both Windows and non-Windows executions. +MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" +export MAVEN_CMD_LINE_ARGS + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=$\{M2_HOME}" "-Dmaven.multiModuleProjectDirectory=$\{MAVEN_PROJECTBASEDIR}" \ + $\{WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/mvnw.tpl.qute.cmd b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/mvnw.tpl.qute.cmd new file mode 100644 index 0000000000000..c351488bf453e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/base/mvnw.tpl.qute.cmd @@ -0,0 +1,182 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. 
The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. 
+IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/{maven.wrapper-version}/maven-wrapper-{maven.wrapper-version}.jar" + +FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + if "%MVNW_VERBOSE%" == "true" ( + echo Found %WRAPPER_JAR% + ) +) else ( + if not "%MVNW_REPOURL%" == "" ( + SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/{maven.wrapper-version}/maven-wrapper-{maven.wrapper-version}.jar" + ) + if "%MVNW_VERBOSE%" == "true" ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... + echo Downloading from: %DOWNLOAD_URL% + ) + + powershell -Command "&{"^ + "$webclient = new-object System.Net.WebClient;"^ + "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ + "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ + "}"^ + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ + "}" + if "%MVNW_VERBOSE%" == "true" ( + echo Finished downloading %WRAPPER_JAR% + ) +) +@REM End of extension + +@REM Provide a "standardized" way to retrieve the CLI args that will +@REM work with both Windows and non-Windows executions. 
+set MAVEN_CMD_LINE_ARGS=%* + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/codestart.yml new file mode 100644 index 0000000000000..5146e94641432 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/core/tooling/maven-wrapper/codestart.yml @@ -0,0 +1,14 @@ +name: maven-wrapper +type: tooling +output-strategy: + "mvnw": "executable" + "mvnw.cmd": "executable" +language: + base: + data: + maven: + version: 3.6.3 + wrapper-version: 0.5.6 + shared-data: + buildtool: + cli: ./mvnw diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..4d1a05ebb5d47 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/README.tpl.qute.md @@ -0,0 +1,3 @@ +{#include readme-header /} + +The Quarkus configuration location is `src/main/resources/{config.file-name}`. 
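The config example that follows (see the `application.yml` and `ConfigResource` added below) computes the displayed speed as speed-of-sound x unit factor x mach. A hedged sketch of exercising it against a dev-mode server, assuming the defaults shown in this diff (343 m/s, factor 2.23694, mach 3) and the default port 8080:

```shell script
# Hypothetical check of the config example; the path comes from the codestart metadata below.
curl http://localhost:8080/config/supersonic
# Should print roughly: Mach 3 is 2301.811 mph   (343 * 2.23694 * 3 = 2301.81126)
```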
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..a1768e062a782 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/src/main/resources/application.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/src/main/resources/application.yml new file mode 100755 index 0000000000000..57ebddd227388 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/base/src/main/resources/application.yml @@ -0,0 +1,5 @@ +display: + mach: 3 + unit: + name: mph + factor: 2.23694 \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/codestart.yml new file mode 100644 index 0000000000000..a9ac14deb627b --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/codestart.yml @@ -0,0 +1,13 @@ +name: config-example +ref: config-yaml +type: code +tags: example +metadata: + title: YAML Config example + description: This Supersonic example displays mach speed in your favourite unit, depending on the specified Quarkus configuration. 
+ path: /config/supersonic + related-guide-section: https://quarkus.io/guides/config-reference#configuration-examples +language: + base: + dependencies: + - io.quarkus:quarkus-resteasy diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/java/src/main/java/org/acme/config/ConfigResource.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/java/src/main/java/org/acme/config/ConfigResource.java new file mode 100755 index 0000000000000..60925f5da57ad --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/java/src/main/java/org/acme/config/ConfigResource.java @@ -0,0 +1,41 @@ +package org.acme.config; + +import org.eclipse.microprofile.config.inject.ConfigProperty; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import java.math.BigDecimal; +import java.util.Optional; + +@Path("/config") +public class ConfigResource { + + @ConfigProperty(name = "constant.speed-of-sound-in-meter-per-second", defaultValue = "343") + int speedOfSound; + + @ConfigProperty(name = "display.mach") + Optional displayMach; + + @ConfigProperty(name = "display.unit.name") + String displayUnitName; + + @ConfigProperty(name = "display.unit.factor") + BigDecimal displayUnitFactor; + + @GET + @Path("supersonic") + @Produces(MediaType.TEXT_PLAIN) + public String supersonic() { + final int mach = displayMach.orElse(1); + final BigDecimal speed = BigDecimal.valueOf(speedOfSound) + .multiply(displayUnitFactor) + .multiply(BigDecimal.valueOf(mach)); + return String.format("Mach %d is %.3f %s", + mach, + speed, + displayUnitName + ); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/kotlin/src/main/kotlin/org/acme/config/ConfigResource.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/kotlin/src/main/kotlin/org/acme/config/ConfigResource.kt new file mode 100755 index 0000000000000..9e4062a8dc020 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/config-example/kotlin/src/main/kotlin/org/acme/config/ConfigResource.kt @@ -0,0 +1,39 @@ +package org.acme.config + +import org.eclipse.microprofile.config.inject.ConfigProperty +import java.math.BigDecimal +import java.util.* +import javax.ws.rs.GET +import javax.ws.rs.Path +import javax.ws.rs.Produces +import javax.ws.rs.core.MediaType + +@Path("/config") +class ConfigResource { + @ConfigProperty(name = "constant.speed-of-sound-in-meter-per-second", defaultValue = "343") + var speedOfSound = 0 + + @ConfigProperty(name = "display.mach") + lateinit var displayMach: Optional + + @ConfigProperty(name = "display.unit.name") + lateinit var displayUnitName: String + + @ConfigProperty(name = "display.unit.factor") + lateinit var displayUnitFactor: BigDecimal + + @GET + @Path("supersonic") + @Produces(MediaType.TEXT_PLAIN) + fun supersonic(): String { + val mach = displayMach.orElse(1) + val speed = BigDecimal.valueOf(speedOfSound.toLong()) + .multiply(displayUnitFactor) + .multiply(BigDecimal.valueOf(mach.toLong())) + return String.format("Mach %d is %.3f %s", + mach, + speed, + displayUnitName + ) + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/base/README.tpl.qute.md 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..ccb61df404b1e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..a1768e062a782 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/codestart.yml new file mode 100644 index 0000000000000..c07bf5bbabf23 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/codestart.yml @@ -0,0 +1,15 @@ +name: funqy-http-example +ref: funqy-http +type: code +tags: example +metadata: + title: Funqy HTTP example + description: Quark is a type of elementary particle and a fundamental constituent of matter, Charm is a Quark flavor. Try to guess the Charm Quark symbol with this Funqy function by changing the ?value= query param. Go Subatomic! + path: /charm?value=s + related-guide-section: https://quarkus.io/guides/funqy-http#get-query-parameter-mapping +language: + base: + dependencies: + - io.quarkus:quarkus-funqy-http + test-dependencies: + - io.rest-assured:rest-assured diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/main/java/org/acme/funqy/Funqy.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/main/java/org/acme/funqy/Funqy.java new file mode 100644 index 0000000000000..1459171f0a48a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/main/java/org/acme/funqy/Funqy.java @@ -0,0 +1,19 @@ +package org.acme.funqy; + +import io.quarkus.funqy.Funq; + +import java.util.Random; + +public class Funqy { + + private static final String CHARM_QUARK_SYMBOL = "c"; + + @Funq + public String charm(Answer answer) { + return CHARM_QUARK_SYMBOL.equalsIgnoreCase(answer.value) ? "You Quark!" 
: "👻 Wrong answer"; + } + + public static class Answer { + public String value; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/native-test/java/org/acme/funqy/FunqyIT.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/native-test/java/org/acme/funqy/FunqyIT.java new file mode 100644 index 0000000000000..cce22690603da --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/native-test/java/org/acme/funqy/FunqyIT.java @@ -0,0 +1,10 @@ +package org.acme.funqy; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class FunqyIT extends FunqyTest { + + // Run the same tests + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/test/java/org/acme/funqy/FunqyTest.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/test/java/org/acme/funqy/FunqyTest.java new file mode 100644 index 0000000000000..54e2f89371594 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/funqy-http-example/java/src/test/java/org/acme/funqy/FunqyTest.java @@ -0,0 +1,23 @@ +package org.acme.funqy; + +import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.containsString; + +@QuarkusTest +public class FunqyTest { + + @Test + public void testCharm() { + given() + .contentType("application/json") + .body("{\"value\": \"c\"}") + .post("/charm") + .then() + .statusCode(200) + .body(containsString("You Quark!")); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..a1768e062a782 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/src/main/resources/application.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/src/main/resources/application.yml new file mode 100644 index 0000000000000..595acd4bf3c00 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/base/src/main/resources/application.yml @@ -0,0 +1,7 @@ +quarkus: + log: + console: + json: + pretty-print: 
true + date-format: YYYY-MM-dd HH:mm:ss + exception-output-type: detailed-and-formatted \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/codestart.yml new file mode 100644 index 0000000000000..6837a7ec36c57 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/codestart.yml @@ -0,0 +1,14 @@ +name: logging-json-example +ref: logging-json +type: code +tags: example +metadata: + title: Logging JSON example + description: This example let you go faster with your jet aircraft, your speed is logged when you send a new request.
    When you reach the speed of sound, a "Sonic Boom" error is going to be thrown and logged. Boom! + path: /logging-json/faster + related-guide-section: https://quarkus.io/guides/logging#configuration +language: + base: + dependencies: + - io.quarkus:quarkus-resteasy + - io.quarkus:quarkus-logging-json diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/java/src/main/java/org/acme/logging/json/LoggingJsonResource.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/java/src/main/java/org/acme/logging/json/LoggingJsonResource.java new file mode 100644 index 0000000000000..5f6e996669b2e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/java/src/main/java/org/acme/logging/json/LoggingJsonResource.java @@ -0,0 +1,35 @@ +package org.acme.logging.json; + +import org.jboss.logging.Logger; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.ServerErrorException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; + +@Path("/logging-json") +public class LoggingJsonResource { + + private static final Logger LOG = Logger.getLogger(LoggingJsonResource.class); + private static final int SPEED_OF_SOUND_IN_METER_PER_SECOND = 343; + + private final AtomicInteger speed = new AtomicInteger(0); + private final Random random = new Random(); + + @GET + @Path("faster") + @Produces(MediaType.TEXT_PLAIN) + public String faster() { + final int s = speed.addAndGet(random.nextInt(200)); + if (s > SPEED_OF_SOUND_IN_METER_PER_SECOND) { + throw new ServerErrorException("💥 SONIC BOOOOOM!!!", Response.Status.SERVICE_UNAVAILABLE); + } + String message = String.format("Your jet aircraft speed is %s m/s.", s); + LOG.info(message); + return message + " Watch the logs..."; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/kotlin/src/main/kotlin/org/acme/logging/json/LoggingJsonResource.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/kotlin/src/main/kotlin/org/acme/logging/json/LoggingJsonResource.kt new file mode 100644 index 0000000000000..3972de99a5c3a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/logging-json/kotlin/src/main/kotlin/org/acme/logging/json/LoggingJsonResource.kt @@ -0,0 +1,35 @@ +package org.acme.logging.json + +import org.jboss.logging.Logger +import java.util.* +import java.util.concurrent.atomic.AtomicInteger +import javax.ws.rs.GET +import javax.ws.rs.Path +import javax.ws.rs.Produces +import javax.ws.rs.ServerErrorException +import javax.ws.rs.core.MediaType +import javax.ws.rs.core.Response + +@Path("/logging-json") +class LoggingJsonResource { + private val speed = AtomicInteger(0) + private val random = Random() + + @GET + @Path("faster") + @Produces(MediaType.TEXT_PLAIN) + fun faster(): String { + val s = speed.addAndGet(random.nextInt(200)) + if (s > SPEED_OF_SOUND_IN_METER_PER_SECOND) { + throw ServerErrorException("💥 SONIC BOOOOOM!!!", Response.Status.SERVICE_UNAVAILABLE) + } + val message = "Your jet aircraft speed is $s m/s." + LOG.info(message) + return "$message Watch the logs..." 
+ } + + companion object { + private val LOG = Logger.getLogger(LoggingJsonResource::class.java) + private const val SPEED_OF_SOUND_IN_METER_PER_SECOND = 343 + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..eb1ae983ef01e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/base/README.tpl.qute.md @@ -0,0 +1,8 @@ +{#include readme-header /} + +Also for picocli applications the dev mode is supported. When running dev mode, the picocli application is executed and on press of the Enter key, is restarted. + +As picocli applications will often require arguments to be passed on the commandline, this is also possible in dev mode via: +```shell script +{buildtool.cli} {buildtool.cmd.dev} {#if input.base-codestart.buildtool == 'gradle' || input.base-codestart.buildtool == 'gradle-kotlin-dsl'}--quarkus-args{#else}-Dquarkus.args{/if}='hello --first-name=Quarky' +``` diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/codestart.yml new file mode 100644 index 0000000000000..84a8140dd0b23 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/codestart.yml @@ -0,0 +1,12 @@ +name: picocli-example +ref: picocli +type: code +tags: example +metadata: + title: Picocli Example + description: Hello and goodbye are civilization fundamentals. Let's not forget it with this example picocli application by changing the command and parameters. 
+ related-guide-section: https://quarkus.io/guides/picocli#command-line-application-with-multiple-commands +language: + base: + dependencies: + - io.quarkus:quarkus-picocli diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/java/src/main/java/org/acme/picocli/EntryCommand.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/java/src/main/java/org/acme/picocli/EntryCommand.java new file mode 100755 index 0000000000000..4447b7610861e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/java/src/main/java/org/acme/picocli/EntryCommand.java @@ -0,0 +1,9 @@ +package org.acme.picocli; + +import io.quarkus.picocli.runtime.annotations.TopCommand; +import picocli.CommandLine; + +@TopCommand +@CommandLine.Command(mixinStandardHelpOptions = true, subcommands = { HelloCommand.class, GoodbyeCommand.class }) +public class EntryCommand { +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/java/src/main/java/org/acme/picocli/GoodbyeCommand.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/java/src/main/java/org/acme/picocli/GoodbyeCommand.java new file mode 100644 index 0000000000000..653ba227a5540 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/picocli-example/java/src/main/java/org/acme/picocli/GoodbyeCommand.java @@ -0,0 +1,21 @@ +package org.acme.picocli; + +import picocli.CommandLine.Command; +import picocli.CommandLine.Option; + +@Command(name = "goodbye") +public class GoodbyeCommand implements Runnable { + + @Option(names = {"--name"}, description = "Guest name") + String name; + + @Option(names = {"--times", "-t"}, defaultValue = "1", description = "How many time should we say goodbye") + int times; + + @Override + public void run() { + for (int i = 0;i quarks = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>())); + + public JacksonResource() { + quarks.add(new Quark("Up", "The up quark or u quark (symbol: u) is the lightest of all quarks, a type of elementary particle, and a major constituent of matter.")); + quarks.add(new Quark("Strange", "The strange quark or s quark (from its symbol, s) is the third lightest of all quarks, a type of elementary particle.")); + quarks.add(new Quark("Charm", "The charm quark, charmed quark or c quark (from its symbol, c) is the third most massive of all quarks, a type of elementary particle.")); + quarks.add(new Quark("???", null)); + } + + @GET + public Set list() { + return quarks; + } + + @POST + public Set add(Quark quark) { + quarks.add(quark); + return quarks; + } + + @DELETE + public Set delete(Quark quark) { + quarks.removeIf(existingQuark -> existingQuark.name.contentEquals(quark.name)); + return quarks; + } + + public static class Quark { + public String name; + public String description; + + public Quark() { + } + + public Quark(String name, String description) { + this.name = name; + this.description = description; + } + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/java/src/main/java/org/acme/resteasyjackson/MyObjectMapperCustomizer.java 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/java/src/main/java/org/acme/resteasyjackson/MyObjectMapperCustomizer.java new file mode 100644 index 0000000000000..9aab532b25d51 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/java/src/main/java/org/acme/resteasyjackson/MyObjectMapperCustomizer.java @@ -0,0 +1,17 @@ +package org.acme.resteasyjackson; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.quarkus.jackson.ObjectMapperCustomizer; + +import javax.inject.Singleton; + +@Singleton +public class MyObjectMapperCustomizer implements ObjectMapperCustomizer { + + @Override + public void customize(ObjectMapper objectMapper) { + // To suppress serializing properties with null values + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/kotlin/src/main/kotlin/org/acme/resteasyjackson/JacksonResource.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/kotlin/src/main/kotlin/org/acme/resteasyjackson/JacksonResource.kt new file mode 100644 index 0000000000000..05b638be16534 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/kotlin/src/main/kotlin/org/acme/resteasyjackson/JacksonResource.kt @@ -0,0 +1,48 @@ +package org.acme.resteasyjackson + +import java.util.* +import javax.ws.rs.* +import javax.ws.rs.core.MediaType + +@Path("/resteasy-jackson/quarks") +@Produces(MediaType.APPLICATION_JSON) +@Consumes(MediaType.APPLICATION_JSON) +class JacksonResource { + + private val quarks = Collections.newSetFromMap(Collections.synchronizedMap(LinkedHashMap())) + + init { + quarks.add(Quark("Up", "The up quark or u quark (symbol: u) is the lightest of all quarks, a type of elementary particle, and a major constituent of matter.")) + quarks.add(Quark("Strange", "The strange quark or s quark (from its symbol, s) is the third lightest of all quarks, a type of elementary particle.")) + quarks.add(Quark("Charm", "The charm quark, charmed quark or c quark (from its symbol, c) is the third most massive of all quarks, a type of elementary particle.")) + quarks.add(Quark("???", null)) + } + + @GET + fun list(): Set { + return quarks + } + + @POST + fun add(quark: Quark): Set { + quarks.add(quark) + return quarks + } + + @DELETE + fun delete(quark: Quark): Set { + quarks.removeIf { existingQuark: Quark -> existingQuark.name!!.contentEquals(quark.name!!) } + return quarks + } + + class Quark { + var name: String? = null + var description: String? = null + + constructor() {} + constructor(name: String?, description: String?) 
{ + this.name = name + this.description = description + } + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/kotlin/src/main/kotlin/org/acme/resteasyjackson/MyObjectMapperCustomizer.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/kotlin/src/main/kotlin/org/acme/resteasyjackson/MyObjectMapperCustomizer.kt new file mode 100644 index 0000000000000..fd5b34937dd2c --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-jackson-example/kotlin/src/main/kotlin/org/acme/resteasyjackson/MyObjectMapperCustomizer.kt @@ -0,0 +1,14 @@ +package org.acme.resteasyjackson + +import com.fasterxml.jackson.annotation.JsonInclude +import com.fasterxml.jackson.databind.ObjectMapper +import io.quarkus.jackson.ObjectMapperCustomizer +import javax.inject.Singleton + +@Singleton +class MyObjectMapperCustomizer : ObjectMapperCustomizer { + override fun customize(objectMapper: ObjectMapper) { + // To suppress serializing properties with null values + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..a1768e062a782 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/src/main/resources/templates/page.qute.tpl.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/src/main/resources/templates/page.qute.tpl.qute.html new file mode 100644 index 0000000000000..14982d0f5b581 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/base/src/main/resources/templates/page.qute.tpl.qute.html @@ -0,0 +1,47 @@ +\{! This parameter declarations makes it possible to validate expressions in the template !} +\{@java.util.List<{project.package-name ?: 'org.acme'}.resteasyqute.Quark> quarks} + + + + + Qute & Quarks + + + +

+Qute & Quarks
+
+Welcome to our Qute subatomic-particles generator.
+
+Generate a new quark
+
+\{#for quark in quarks}
+\{quark.flavor.toString.toLowerCase}
+\{/for}
    + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/codestart.yml new file mode 100644 index 0000000000000..be9b0f4a246ca --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/codestart.yml @@ -0,0 +1,13 @@ +name: resteasy-qute-example +ref: resteasy-qute +type: code +tags: example +metadata: + title: RESTEasy Qute example + description: This example uses a RESTEasy & Qute template to render a subatomic-particle generator web page. So Qute! + path: /qute/quarks + related-guide-section: https://quarkus.io/guides/qute#type-safe-templates +language: + base: + dependencies: + - io.quarkus:quarkus-resteasy-qute diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/java/src/main/java/org/acme/resteasyqute/Quark.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/java/src/main/java/org/acme/resteasyqute/Quark.java new file mode 100644 index 0000000000000..7c54bb8ad80fe --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/java/src/main/java/org/acme/resteasyqute/Quark.java @@ -0,0 +1,25 @@ +package org.acme.resteasyqute; + +public class Quark { + public enum Flavor { DOWN, UP, STRANGE, CHARM, BOTTOM, TOP } + + public enum Color { + RED("#ff6961"), + GREEN("#77dd77"), + BLUE("#aec6cf"); + + public final String hex; + + Color(String hex) { + this.hex = hex; + } + } + + public final Flavor flavor; + public final Color color; + + public Quark(Flavor flavor, Color color) { + this.flavor = flavor; + this.color = color; + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/java/src/main/java/org/acme/resteasyqute/QuteResource.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/java/src/main/java/org/acme/resteasyqute/QuteResource.java new file mode 100644 index 0000000000000..91dec46ab26b6 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/java/src/main/java/org/acme/resteasyqute/QuteResource.java @@ -0,0 +1,55 @@ +package org.acme.resteasyqute; + +import io.quarkus.qute.Template; +import io.quarkus.qute.TemplateExtension; +import io.quarkus.qute.TemplateInstance; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Random; + +@Path("/qute/quarks") +public class QuteResource { + + private final List quarks = Collections.synchronizedList(new ArrayList<>()); + + @Inject + Template page; + + public QuteResource() { + for (int i = 0; i < 3; i++) { + this.addQuark(); + } + } + + @GET + @Produces(MediaType.TEXT_HTML) + public TemplateInstance get() { + return page.data("quarks", new ArrayList<>(quarks)); + } + + @POST + @Path("add") + public void addQuark() { + final Random random = new Random(); + final Quark.Flavor flavor = Quark.Flavor.values()[random.nextInt(Quark.Flavor.values().length)]; + final Quark.Color color = 
Quark.Color.values()[random.nextInt(Quark.Color.values().length)]; + quarks.add(new Quark(flavor, color)); + } + + /** + * This template extension method implements the "position" computed property. + */ + @TemplateExtension + static int position(Quark quark) { + return new Random().nextInt(100); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/kotlin/src/main/kotlin/org/acme/resteasyqute/Quark.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/kotlin/src/main/kotlin/org/acme/resteasyqute/Quark.kt new file mode 100644 index 0000000000000..18b7a813d3361 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/kotlin/src/main/kotlin/org/acme/resteasyqute/Quark.kt @@ -0,0 +1,11 @@ +package org.acme.resteasyqute + +class Quark(val flavor: Flavor, val color: Color) { + enum class Flavor { + DOWN, UP, STRANGE, CHARM, BOTTOM, TOP + } + + enum class Color(val hex: String) { + RED("#ff6961"), GREEN("#77dd77"), BLUE("#aec6cf"); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/kotlin/src/main/kotlin/org/acme/resteasyqute/QuteResource.kt b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/kotlin/src/main/kotlin/org/acme/resteasyqute/QuteResource.kt new file mode 100644 index 0000000000000..cf292f66726be --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/resteasy-qute-example/kotlin/src/main/kotlin/org/acme/resteasyqute/QuteResource.kt @@ -0,0 +1,54 @@ +package org.acme.resteasyqute + +import io.quarkus.qute.Template +import io.quarkus.qute.TemplateExtension +import io.quarkus.qute.TemplateInstance +import java.util.* +import javax.inject.Inject +import javax.ws.rs.GET +import javax.ws.rs.POST +import javax.ws.rs.Path +import javax.ws.rs.Produces +import javax.ws.rs.core.MediaType + +@Path("/qute/quarks") +class QuteResource { + private val quarks = Collections.synchronizedList(ArrayList()) + + init { + for (i in 0..2) { + addQuark() + } + } + + @Inject + lateinit var page: Template + + @GET + @Produces(MediaType.TEXT_HTML) + fun get(): TemplateInstance { + return page.data("quarks", ArrayList(quarks)) + } + + @POST + @Path("add") + fun addQuark() { + val random = Random() + val flavor = Quark.Flavor.values()[random.nextInt(Quark.Flavor.values().size)] + val color = Quark.Color.values()[random.nextInt(Quark.Color.values().size)] + quarks.add(Quark(flavor, color)) + } + + @TemplateExtension + class QuarkExtension { + companion object { + /** + * This template extension method implements the "position" computed property. 
+ */ + @JvmStatic + fun position(quark: Quark): Int { + return Random().nextInt(100) + } + } + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..405280158877d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/base/README.tpl.qute.md @@ -0,0 +1 @@ +{#include readme-header /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/base/src/main/resources/META-INF/resources/index.entry.qute.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/base/src/main/resources/META-INF/resources/index.entry.qute.html new file mode 100644 index 0000000000000..a1768e062a782 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/base/src/main/resources/META-INF/resources/index.entry.qute.html @@ -0,0 +1 @@ +{#include index-entry /} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/codestart.yml new file mode 100644 index 0000000000000..953e14ba8dd21 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/codestart.yml @@ -0,0 +1,13 @@ +name: undertow-websockets-example +ref: undertow-websockets +type: code +tags: example +metadata: + title: WebSockets example using Undertow + description: Discover WebSockets using Undertow with this cool supersonic chat example. Open multiple tabs to simulate different users. 
+ path: /supersonic-chat.html + related-guide-section: https://quarkus.io/guides/websockets +language: + base: + dependencies: + - io.quarkus:quarkus-undertow-websockets diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/main/java/org/acme/undertowwebsockets/SupersonicChatSocket.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/main/java/org/acme/undertowwebsockets/SupersonicChatSocket.java new file mode 100644 index 0000000000000..f918cb2635dd8 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/main/java/org/acme/undertowwebsockets/SupersonicChatSocket.java @@ -0,0 +1,62 @@ +package org.acme.undertowwebsockets; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import javax.enterprise.context.ApplicationScoped; +import javax.websocket.OnClose; +import javax.websocket.OnError; +import javax.websocket.OnMessage; +import javax.websocket.OnOpen; +import javax.websocket.Session; +import javax.websocket.server.PathParam; +import javax.websocket.server.ServerEndpoint; + +import org.jboss.logging.Logger; + +@ServerEndpoint("/chat/{username}") +@ApplicationScoped +public class SupersonicChatSocket { + + private static final Logger LOG = Logger.getLogger(SupersonicChatSocket.class); + + Map sessions = new ConcurrentHashMap<>(); + + @OnOpen + public void onOpen(Session session, @PathParam("username") String username) { + sessions.put(username, session); + } + + @OnClose + public void onClose(Session session, @PathParam("username") String username) { + sessions.remove(username); + broadcast("User " + username + " left"); + } + + @OnError + public void onError(Session session, @PathParam("username") String username, Throwable throwable) { + sessions.remove(username); + LOG.error("onError", throwable); + broadcast("User " + username + " left on error: " + throwable); + } + + @OnMessage + public void onMessage(String message, @PathParam("username") String username) { + if (message.equalsIgnoreCase("_ready_")) { + broadcast("User " + username + " joined"); + } else { + broadcast(">> " + username + ": " + message); + } + } + + private void broadcast(String message) { + sessions.values().forEach(s -> { + s.getAsyncRemote().sendObject(message, result -> { + if (result.getException() != null) { + System.out.println("Unable to send message: " + result.getException()); + } + }); + }); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/main/resources/META-INF/resources/supersonic-chat.html b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/main/resources/META-INF/resources/supersonic-chat.html new file mode 100644 index 0000000000000..9083febafa014 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/main/resources/META-INF/resources/supersonic-chat.html @@ -0,0 +1,115 @@ + + + + + + Quarkus Supersonic Chat! + + + + + + + + +
    + + + + + + + + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/test/java/org/acme/undertowwebsockets/SupersonicChatSocketTest.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/test/java/org/acme/undertowwebsockets/SupersonicChatSocketTest.java new file mode 100644 index 0000000000000..99f521e51df9e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/examples/undertow-websockets-example/java/src/test/java/org/acme/undertowwebsockets/SupersonicChatSocketTest.java @@ -0,0 +1,55 @@ +package org.acme.undertowwebsockets; + +import java.net.URI; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; + +import javax.websocket.ClientEndpoint; +import javax.websocket.ContainerProvider; +import javax.websocket.OnMessage; +import javax.websocket.OnOpen; +import javax.websocket.Session; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.common.http.TestHTTPResource; +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +public class SupersonicChatSocketTest { + + private static final LinkedBlockingDeque MESSAGES = new LinkedBlockingDeque<>(); + + @TestHTTPResource("/chat/stu") + URI uri; + + @Test + public void testWebsocketChat() throws Exception { + try (Session session = ContainerProvider.getWebSocketContainer().connectToServer(Client.class, uri)) { + Assertions.assertEquals("CONNECT", MESSAGES.poll(10, TimeUnit.SECONDS)); + Assertions.assertEquals("User stu joined", MESSAGES.poll(10, TimeUnit.SECONDS)); + session.getAsyncRemote().sendText("hello world"); + Assertions.assertEquals(">> stu: hello world", MESSAGES.poll(10, TimeUnit.SECONDS)); + } + } + + @ClientEndpoint + public static class Client { + + @OnOpen + public void open(Session session) { + MESSAGES.add("CONNECT"); + // Send a message to indicate that we are ready, + // as the message handler may not be registered immediately after this callback. + session.getAsyncRemote().sendText("_ready_"); + } + + @OnMessage + void message(String msg) { + MESSAGES.add(msg); + } + + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..d0ee60125a88b --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/base/README.tpl.qute.md @@ -0,0 +1,4 @@ +{#include readme-header /} + +> :warning: **INCOMPATIBLE WITH DEV MODE**: Amazon Lambda Binding is not compatible with dev mode yet! 
+ diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/codestart.yml new file mode 100644 index 0000000000000..8afe7ecbb7afb --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/codestart.yml @@ -0,0 +1,16 @@ +name: amazon-lambda-example +ref: amazon-lambda +type: code +tags: + - example + - singleton-example +metadata: + title: Amazon Lambda Integration example + description: This example contains a Quarkus Greeting Lambda ready for Amazon. + related-guide-section: https://quarkus.io/guides/amazon-lambda +language: + base: + dependencies: + - io.quarkus:quarkus-amazon-lambda + test-dependencies: + - io.quarkus:quarkus-test-amazon-lambda diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/payload.json b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/payload.json new file mode 100644 index 0000000000000..decf49104ca4a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/payload.json @@ -0,0 +1,3 @@ +{ + "name": "Bill" +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/main/java/org/acme/lambda/GreetingLambda.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/main/java/org/acme/lambda/GreetingLambda.java new file mode 100644 index 0000000000000..54397ca107a8a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/main/java/org/acme/lambda/GreetingLambda.java @@ -0,0 +1,12 @@ +package org.acme.lambda; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +public class GreetingLambda implements RequestHandler { + + @Override + public String handleRequest(Person input, Context context) { + return "Hello " + input.getName(); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/main/java/org/acme/lambda/Person.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/main/java/org/acme/lambda/Person.java new file mode 100644 index 0000000000000..6052788936db0 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/main/java/org/acme/lambda/Person.java @@ -0,0 +1,15 @@ +package org.acme.lambda; + +public class Person { + + private String name; + + public String getName() { + return name; + } + + public Person setName(String name) { + this.name = name; + return this; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/native-test/java/org/acme/lambda/LambdaHandlerTestIT.java 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/native-test/java/org/acme/lambda/LambdaHandlerTestIT.java new file mode 100644 index 0000000000000..477313b413ebd --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/native-test/java/org/acme/lambda/LambdaHandlerTestIT.java @@ -0,0 +1,9 @@ +package org.acme.lambda; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class LambdaHandlerTestIT extends LambdaHandlerTest { + + // Execute the same tests but in native mode. +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/test/java/org/acme/lambda/LambdaHandlerTest.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/test/java/org/acme/lambda/LambdaHandlerTest.java new file mode 100644 index 0000000000000..3ba4687dd2d6f --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/test/java/org/acme/lambda/LambdaHandlerTest.java @@ -0,0 +1,20 @@ +package org.acme.lambda; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import io.quarkus.amazon.lambda.test.LambdaClient; +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +public class LambdaHandlerTest { + + @Test + public void testSimpleLambdaSuccess() throws Exception { + Person in = new Person(); + in.setName("Stu"); + String out = LambdaClient.invoke(String.class, in); + Assertions.assertEquals("Hello Stu", out); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/test/resources/application.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/test/resources/application.yml new file mode 100644 index 0000000000000..6f64d4a18d957 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/amazon-lambda-example/java/src/test/resources/application.yml @@ -0,0 +1,3 @@ +quarkus.lambda.enable-polling-jvm-mode: true + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..26809cc0bf4ec --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/README.tpl.qute.md @@ -0,0 +1,3 @@ +{#include readme-header /} + +> :warning: **INCOMPATIBLE WITH NATIVE**: Azure Functions Integration is not compatible with native yet! 
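This codestart's Azure Functions HTTP integration routes every incoming request to the Quarkus application (see the `function.json` binding below), so an ordinary JAX-RS resource is all that is needed to expose an endpoint under the `/api` root path the codestart configures. A minimal sketch of such a resource follows; the package, class and path names are illustrative and are not part of this patch:

```java
package org.acme.azurefunctions;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

// Illustrative only: with the azure-functions-http binding, a resource like
// this would be reachable under the /api root path once deployed.
@Path("/greeting")
public class GreetingResource {

    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String greet() {
        return "Hello from Azure Functions HTTP";
    }
}
```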
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/function.tpl.qute.json b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/function.tpl.qute.json new file mode 100644 index 0000000000000..ee7d430606236 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/function.tpl.qute.json @@ -0,0 +1,17 @@ +{ + "scriptFile" : "../{project.artifact-id}-{project.version}.jar", + "entryPoint" : "io.quarkus.azure.functions.resteasy.runtime.Function.run", + "bindings" : [ { + "type" : "httpTrigger", + "direction" : "in", + "name" : "req", + "route" : "{*path}", + "methods" : [ "GET", "POST", "HEAD", "PUT", "OPTIONS", "DELETE" ], + "dataType" : "binary", + "authLevel" : "ANONYMOUS" + }, { + "type" : "http", + "direction" : "out", + "name" : "$return" + } ] +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/host.json b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/host.json new file mode 100644 index 0000000000000..a8fe8c4c8e513 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/host.json @@ -0,0 +1,3 @@ +{ + "version": "2.0" +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/local.settings.json b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/local.settings.json new file mode 100644 index 0000000000000..8804ca029cf22 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/azure-config/local.settings.json @@ -0,0 +1,7 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "", + "FUNCTIONS_WORKER_RUNTIME": "java" + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/pom.tpl.qute.xml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/pom.tpl.qute.xml new file mode 100644 index 0000000000000..7416813782e0d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/pom.tpl.qute.xml @@ -0,0 +1,135 @@ + + + 1.9.1 + 3.1.0 + 11 + 11 + $\{artifactId}-{gen-info.time} + {app-region} + {resource-group} + {function} + $\{project.build.directory}/azure-functions/$\{functionAppName} + + + + + + com.microsoft.azure + azure-functions-maven-plugin + $\{azure.functions.maven.plugin.version} + + $\{functionResourceGroup} + $\{functionAppName} + $\{functionAppRegion} + + + windows + 11 + + + + + + + + + FUNCTIONS_EXTENSION_VERSION + ~3 + + + + + + org.apache.maven.plugins + maven-resources-plugin + $\{resources-plugin.version} + + + + copy-base-azure-config + package + + copy-resources + + + true + $\{stagingDirectory} + + + $\{project.basedir}/azure-config + + host.json + 
local.settings.json + + + + + + + + copy-function-json + install + + copy-resources + + + true + $\{stagingDirectory}/$\{function} + + + $\{project.basedir}/azure-config + true + + function.json + + + + + + + + copy-uberjar + install + + copy-resources + + + true + $\{stagingDirectory} + + + $\{project.build.directory} + + $\{project.artifactId}-$\{project.version}-runner.jar + + + + + + + + + + com.coderplus.maven.plugins + copy-rename-maven-plugin + 1.0.1 + + + rename-file + package + + copy + + + $\{project.build.directory}/$\{project.artifactId}-$\{project.version}-runner.jar + $\{stagingDirectory}/$\{project.artifactId}-$\{project.version}.jar + + + + + + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/src/main/resources/application.tpl.qute.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/src/main/resources/application.tpl.qute.yml new file mode 100644 index 0000000000000..0d3141362fddb --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/base/src/main/resources/application.tpl.qute.yml @@ -0,0 +1,5 @@ +quarkus: + http: + root-path: {root-context-path} + package: + type: uber-jar \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/codestart.yml new file mode 100644 index 0000000000000..bffb79cd5a7dc --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/azure-functions-http-example/codestart.yml @@ -0,0 +1,24 @@ +name: azure-functions-http-example +ref: azure-functions-http +type: code +tags: + - example + - singleton-example + - maven-only +metadata: + title: Azure Functions HTTP Integration example + description: This example contains a HTTP function ready for Azure. + related-guide-section: https://quarkus.io/guides/azure-functions-http +language: + base: + data: + # /api is the default root context azure functions will prepend + root-context-path: "/api" + app-region: + westus + resource-group: + java-functions-group + function: + greeting + dependencies: + - io.quarkus:quarkus-azure-functions-http diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..60f66233bb227 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/base/README.tpl.qute.md @@ -0,0 +1,3 @@ +{#include readme-header /} + +> :warning: **INCOMPATIBLE WITH DEV MODE**: Funqy Amazon Lambda Binding is not compatible with dev mode yet! 
diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/codestart.yml new file mode 100644 index 0000000000000..08ff591491429 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/codestart.yml @@ -0,0 +1,19 @@ +name: funqy-amazon-lambda-example +ref: funqy-amazon-lambda +type: code +tags: + - example + - singleton-example +metadata: + title: Funqy Amazon Lambda Binding example + description: This example contains a Quarkus Funqy Greeting Function ready for Amazon Lambda. + related-guide-section: https://quarkus.io/guides/funqy-amazon-lambda-http +language: + base: + shared-data: + supports: + dev-mode: false + dependencies: + - io.quarkus:quarkus-funqy-amazon-lambda + test-dependencies: + - io.quarkus:quarkus-test-amazon-lambda diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/payload.json b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/payload.json new file mode 100644 index 0000000000000..af392298f6be0 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/payload.json @@ -0,0 +1 @@ +{ "name" : "Bill" } \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/main/java/org/acme/funqy/GreetingFunction.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/main/java/org/acme/funqy/GreetingFunction.java new file mode 100644 index 0000000000000..e1d8a7f3a7bd4 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/main/java/org/acme/funqy/GreetingFunction.java @@ -0,0 +1,11 @@ +package org.acme.funqy; + +import io.quarkus.funqy.Funq; + +public class GreetingFunction { + + @Funq + public String myFunqyGreeting(Person friend) { + return "Hello " + friend.getName(); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/main/java/org/acme/funqy/Person.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/main/java/org/acme/funqy/Person.java new file mode 100644 index 0000000000000..8c62c19a5c61a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/main/java/org/acme/funqy/Person.java @@ -0,0 +1,19 @@ +package org.acme.funqy; + +public class Person { + private String name; + + public Person() {} + + public Person(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/native-test/java/org/acme/funqy/FunqyIT.java 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/native-test/java/org/acme/funqy/FunqyIT.java new file mode 100644 index 0000000000000..cce22690603da --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/native-test/java/org/acme/funqy/FunqyIT.java @@ -0,0 +1,10 @@ +package org.acme.funqy; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class FunqyIT extends FunqyTest { + + // Run the same tests + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/native-test/java/resources/application.properties b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/native-test/java/resources/application.properties new file mode 100644 index 0000000000000..7d1a703249226 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/native-test/java/resources/application.properties @@ -0,0 +1,3 @@ +quarkus.lambda.enable-polling-jvm-mode=true + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/test/java/org/acme/funqy/FunqyTest.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/test/java/org/acme/funqy/FunqyTest.java new file mode 100644 index 0000000000000..c689c8c74689a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/test/java/org/acme/funqy/FunqyTest.java @@ -0,0 +1,16 @@ +package org.acme.funqy; + +import io.quarkus.amazon.lambda.test.LambdaClient; +import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +@QuarkusTest +public class FunqyTest { + @Test + public void testSimpleLambdaSuccess() throws Exception { + Person friend = new Person("Bill"); + String out = LambdaClient.invoke(String.class, friend); + Assertions.assertEquals("Hello Bill", out); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/test/resources/application.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/test/resources/application.yml new file mode 100644 index 0000000000000..6f64d4a18d957 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-amazon-lambda-example/java/src/test/resources/application.yml @@ -0,0 +1,3 @@ +quarkus.lambda.enable-polling-jvm-mode: true + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..d28b319d305bc --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/base/README.tpl.qute.md @@ 
-0,0 +1,8 @@ +{#include readme-header /} + +> :warning: **INCOMPATIBLE WITH DEV MODE**: Funqy Google Cloud Functions is not compatible with dev mode yet! + +Two examples have been generated under `src/main/java/org/acme/funqygooglecloudfunctions`, you must remove them before deploying to +Google Cloud Functions or setup multi-functions support, see https://quarkus.io/guides/funqy-gcp-functions#choose. + + diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/codestart.yml new file mode 100644 index 0000000000000..ec961f8eab675 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/codestart.yml @@ -0,0 +1,14 @@ +name: funqy-google-cloud-functions-example +ref: funqy-google-cloud-functions +type: code +tags: + - example + - singleton-example +metadata: + title: Funqy Google Cloud Functions Integration examples + description: Examples of Background Functions with Funqy for Quarkus. + related-guide-section: https://quarkus.io/guides/funqy-gcp-functions#choose +language: + base: + dependencies: + - io.quarkus:quarkus-funqy-google-cloud-functions diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingFunctions.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingFunctions.java new file mode 100644 index 0000000000000..45f2d91086c94 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingFunctions.java @@ -0,0 +1,26 @@ +package org.acme.funqygooglecloudfunctions; + +import javax.inject.Inject; + +import io.quarkus.funqy.Funq; +import io.quarkus.funqy.gcp.functions.event.PubsubMessage; +import io.quarkus.funqy.gcp.functions.event.StorageEvent; + +public class GreetingFunctions { + + @Inject + GreetingService service; + + @Funq + public void helloPubSubWorld(PubsubMessage pubSubEvent) { + String message = service.hello("world"); + System.out.println(pubSubEvent.messageId + " - " + message); + } + + @Funq + public void helloGCSWorld(StorageEvent storageEvent) { + String message = service.hello("world"); + System.out.println(storageEvent.name + " - " + message); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingService.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingService.java new file mode 100644 index 0000000000000..61ee2f8b2e4af --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingService.java @@ -0,0 +1,21 @@ +package org.acme.funqygooglecloudfunctions; + +import 
javax.enterprise.context.ApplicationScoped; + +@ApplicationScoped +public class GreetingService { + private String greeting = "Hello"; + private String punctuation = "!"; + + public void setGreeting(String greet) { + greeting = greet; + } + + public void setPunctuation(String punctuation) { + this.punctuation = punctuation; + } + + public String hello(String val) { + return greeting + " " + val + punctuation; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..6f2f738ed594e --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/base/README.tpl.qute.md @@ -0,0 +1,6 @@ +{#include readme-header /} + +You'll need to read the guide to learn how to deploy and run this example within a full Knative Kubernetes or OpenShift +environment. + +This example is incomplete until you specify your docker.io account name within `src/main/k8s/funqy-service.yaml`. diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/codestart.yml new file mode 100644 index 0000000000000..9357d42994e22 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/codestart.yml @@ -0,0 +1,19 @@ +name: funqy-knative-events-example +ref: funqy-knative-events +type: code +tags: + - example + - singleton-example +metadata: + title: Funqy Knative Events Binding example + description: This example contains a Quarkus Knative Funqy Function ready for Kubernetes or Openshift. 
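The Knative Funqy codestart above wires a @Funq method to CloudEvents delivered over HTTP in binary content mode; the generated function, Knative trigger, and test appear further below in this diff. As a rough, hypothetical sketch of how such an event can be sent to the running service, the client below posts a binary-mode CloudEvent whose `ce-type` matches the `myCloudEventGreeting` function. The host and port are assumptions for a local run, and the payload and expected 204 status mirror the generated FunqyTest.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.UUID;

public class CloudEventSender {
    public static void main(String[] args) throws Exception {
        // Binary-mode CloudEvent: attributes travel as ce-* headers, the JSON payload as the body.
        // http://localhost:8080/ is an assumed URL for a locally running instance of the generated app.
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:8080/"))
                .header("Content-Type", "application/json")
                .header("ce-specversion", "1.0")
                .header("ce-id", UUID.randomUUID().toString())
                .header("ce-type", "myCloudEventGreeting") // routes to the @Funq method of the same name
                .header("ce-source", "dev-client")
                .POST(HttpRequest.BodyPublishers.ofString("{ \"name\": \"Bill\" }"))
                .build();
        HttpResponse<Void> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.discarding());
        System.out.println("Status: " + response.statusCode()); // the generated test expects 204
    }
}
```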
+ related-guide-section: https://quarkus.io/guides/funqy-knative-events +language: + base: + data: + k8s-service-name: funqy-knative-events-codestart + docker-io-account: yourAccountName + dependencies: + - io.quarkus:quarkus-funqy-knative-events + test-dependencies: + - io.rest-assured:rest-assured diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/java/org/acme/funqy/cloudevent/CloudEventGreeting.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/java/org/acme/funqy/cloudevent/CloudEventGreeting.java new file mode 100644 index 0000000000000..7756cf108ad74 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/java/org/acme/funqy/cloudevent/CloudEventGreeting.java @@ -0,0 +1,13 @@ +package org.acme.funqy.cloudevent; + +import io.quarkus.funqy.Funq; +import org.jboss.logging.Logger; + +public class CloudEventGreeting { + private static final Logger log = Logger.getLogger(CloudEventGreeting.class); + + @Funq + public void myCloudEventGreeting(Person input) { + log.info("Hello " + input.getName()); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/java/org/acme/funqy/cloudevent/Person.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/java/org/acme/funqy/cloudevent/Person.java new file mode 100644 index 0000000000000..bab8889318cff --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/java/org/acme/funqy/cloudevent/Person.java @@ -0,0 +1,19 @@ +package org.acme.funqy.cloudevent; + +public class Person { + private String name; + + public Person() {} + + public Person(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/k8s/funqy-service.tpl.qute.yaml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/k8s/funqy-service.tpl.qute.yaml new file mode 100644 index 0000000000000..e8aab28354388 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/k8s/funqy-service.tpl.qute.yaml @@ -0,0 +1,13 @@ +apiVersion: serving.knative.dev/v1 +kind: Service +metadata: + name: {k8s-service-name} +spec: + template: + metadata: + name: {k8s-service-name}-v1 + annotations: + autoscaling.knative.dev/target: "1" + spec: + containers: + - image: docker.io/{docker-io-account}/{k8s-service-name} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/k8s/funqy-trigger.tpl.qute.yaml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/k8s/funqy-trigger.tpl.qute.yaml new file mode 100644 index 0000000000000..ec88843f0718e --- 
/dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/main/k8s/funqy-trigger.tpl.qute.yaml @@ -0,0 +1,13 @@ +apiVersion: eventing.knative.dev/v1alpha1 +kind: Trigger +metadata: + name: myCloudEventGreeting +spec: + filter: + attributes: + type: myCloudEventGreeting + subscriber: + ref: + apiVersion: serving.knative.dev/v1 + kind: Service + name: {k8s-service-name} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/native-test/java/org/acme/funqy/cloudevent/FunqyIT.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/native-test/java/org/acme/funqy/cloudevent/FunqyIT.java new file mode 100644 index 0000000000000..b80ffdb2372f7 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/native-test/java/org/acme/funqy/cloudevent/FunqyIT.java @@ -0,0 +1,10 @@ +package org.acme.funqy.cloudevent; + +import io.quarkus.test.junit.NativeImageTest; + +@NativeImageTest +public class FunqyIT extends FunqyTest { + + // Run the same tests + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/test/java/org/acme/funqy/cloudevent/FunqyTest.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/test/java/org/acme/funqy/cloudevent/FunqyTest.java new file mode 100644 index 0000000000000..38b42eacc8236 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/funqy-knative-events-example/java/src/test/java/org/acme/funqy/cloudevent/FunqyTest.java @@ -0,0 +1,28 @@ +package org.acme.funqy.cloudevent; + +import io.quarkus.test.junit.QuarkusTest; +import io.restassured.RestAssured; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; + +import java.util.UUID; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +@QuarkusTest +public class FunqyTest { + + @Test + public void testCloudEvent() { + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "myCloudEventGreeting") + .header("ce-source", "test") + .body("{ \"name\": \"Bill\" }") + .post("/") + .then().statusCode(204); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/base/README.tpl.qute.md new file mode 100755 index 0000000000000..339d3818fc9d3 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/base/README.tpl.qute.md @@ -0,0 +1,6 @@ +{#include readme-header /} + +Two examples have been generated under `src/main/java/org/acme/googlecloudfunctions`, you must remove them before deploying to +Google Cloud Functions or setup multi-functions support, see https://quarkus.io/guides/gcp-functions#choose-your-function. 
+ +> :warning: **INCOMPATIBLE WITH DEV MODE**: Google Cloud Functions is not compatible with dev mode yet! diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/codestart.yml new file mode 100755 index 0000000000000..9d433d9bbe576 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/codestart.yml @@ -0,0 +1,14 @@ +name: google-cloud-functions-example +ref: google-cloud-functions +type: code +tags: + - example + - singleton-example +metadata: + title: Google Cloud Functions Integration examples + description: Examples of Google Cloud HTTP and Background Functions for Quarkus. + related-guide-section: https://quarkus.io/guides/gcp-functions#choose-your-function +language: + base: + dependencies: + - io.quarkus:quarkus-google-cloud-functions diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/java/src/main/java/org/acme/googlecloudfunctions/HelloWorldBackgroundFunction.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/java/src/main/java/org/acme/googlecloudfunctions/HelloWorldBackgroundFunction.java new file mode 100755 index 0000000000000..5ab97f59ce029 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/java/src/main/java/org/acme/googlecloudfunctions/HelloWorldBackgroundFunction.java @@ -0,0 +1,19 @@ +package org.acme.googlecloudfunctions; + +import javax.enterprise.context.ApplicationScoped; + +import com.google.cloud.functions.BackgroundFunction; +import com.google.cloud.functions.Context; + +@ApplicationScoped +public class HelloWorldBackgroundFunction implements BackgroundFunction<StorageEvent> { + + @Override + public void accept(StorageEvent event, Context context) throws Exception { + System.out.println("Receive event on file: " + event.name); + } + + public static class StorageEvent { + public String name; + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/java/src/main/java/org/acme/googlecloudfunctions/HelloWorldHttpFunction.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/java/src/main/java/org/acme/googlecloudfunctions/HelloWorldHttpFunction.java new file mode 100755 index 0000000000000..d1ada420e428d --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-example/java/src/main/java/org/acme/googlecloudfunctions/HelloWorldHttpFunction.java @@ -0,0 +1,19 @@ +package org.acme.googlecloudfunctions; + +import java.io.Writer; + +import javax.enterprise.context.ApplicationScoped; + +import com.google.cloud.functions.HttpFunction; +import com.google.cloud.functions.HttpRequest; +import com.google.cloud.functions.HttpResponse; + +@ApplicationScoped +public class HelloWorldHttpFunction implements HttpFunction { + + @Override + public void service(HttpRequest httpRequest, HttpResponse httpResponse) throws Exception { + Writer writer = httpResponse.getWriter(); + 
writer.write("Hello World"); + } +} \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/base/README.tpl.qute.md b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/base/README.tpl.qute.md new file mode 100644 index 0000000000000..db3cfce61dc98 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/base/README.tpl.qute.md @@ -0,0 +1,13 @@ +{#include readme-header /} + +Inside the `src/main/java/org/acme/googlecloudfunctionshttp` directory, you will find examples for: + +- JAX-RS (via RESTEasy): `GreetingResource.java` +- Vert.x reactive routes: `GreetingRoutes.java` +- Funqy HTTP: `GreetingFunqy` +- Servlet (via Undertow): `GreetingServlet.java` + +Each of these example uses a different extension. +If you don't plan to use all those extensions, you should remove them from the `pom.xml`. + +> :warning: **INCOMPATIBLE WITH DEV MODE**: Google Cloud Functions HTTP is not compatible with dev mode yet! diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/codestart.yml b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/codestart.yml new file mode 100644 index 0000000000000..0e452283e55af --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/codestart.yml @@ -0,0 +1,18 @@ +name: google-cloud-functions-http-example +ref: google-cloud-functions-http +type: code +tags: + - example + - singleton-example +metadata: + title: Google Cloud Functions HTTP Integration examples + description: Examples of Google Cloud HTTP functions for Quarkus written with RESTEasy (JAX-RS), Undertow (Servlet), Vert.x Web, or Funqy HTTP. 
+ related-guide-section: https://quarkus.io/guides/gcp-functions-http#creating-the-endpoints +language: + base: + dependencies: + - io.quarkus:quarkus-google-cloud-functions-http + - io.quarkus:quarkus-resteasy + - io.quarkus:quarkus-vertx-web + - io.quarkus:quarkus-funqy-http + - io.quarkus:quarkus-undertow diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingFunqy.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingFunqy.java new file mode 100644 index 0000000000000..5b000b889fa0a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingFunqy.java @@ -0,0 +1,17 @@ +package org.acme.googlecloudfunctionshttp; + +import io.quarkus.funqy.Funq; +import io.smallrye.mutiny.Uni; + +public class GreetingFunqy { + + @Funq + public String funqy() { + return "Make it funqy"; + } + + @Funq + public Uni<String> funqyAsync() { + return Uni.createFrom().item(() -> "Make it funqy asynchronously"); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingResource.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingResource.java new file mode 100644 index 0000000000000..8c58b0385fd55 --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingResource.java @@ -0,0 +1,49 @@ +package org.acme.googlecloudfunctionshttp; + +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +@Path("/hello") +public class GreetingResource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "hello"; + } + + @POST + @Produces(MediaType.TEXT_PLAIN) + @Consumes(MediaType.TEXT_PLAIN) + public String hello(String name) { + return "hello " + name; + } + + @POST + @Produces(MediaType.APPLICATION_OCTET_STREAM) + @Consumes(MediaType.APPLICATION_OCTET_STREAM) + public byte[] hello(byte[] bytes) { + if (bytes[0] != 0 || bytes[1] != 1 || bytes[2] != 2 || bytes[3] != 3) { + throw new RuntimeException("bad input"); + } + byte[] rtn = { 4, 5, 6 }; + return rtn; + } + + @POST + @Path("empty") + public void empty() { + + } + + @GET + @Path("error") + public void error() { + throw new RuntimeException("Oups!"); + } + +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingRoutes.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingRoutes.java new file mode 100644 index 0000000000000..1de2bf1cee85f --- /dev/null +++ 
b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingRoutes.java @@ -0,0 +1,20 @@ +package org.acme.googlecloudfunctionshttp; + +import static io.vertx.core.http.HttpMethod.GET; + +import io.quarkus.vertx.web.Route; +import io.vertx.ext.web.RoutingContext; + +public class GreetingRoutes { + @Route(path = "/vertx/hello", methods = GET) + void hello(RoutingContext context) { + context.response().headers().set("Content-Type", "text/plain"); + context.response().setStatusCode(200).end("hello"); + } + + @Route(path = "/vertx/error", methods = GET) + void error(RoutingContext context) { + context.response().headers().set("Content-Type", "text/plain"); + context.response().setStatusCode(500).end("Oups!"); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingServlet.java b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingServlet.java new file mode 100644 index 0000000000000..31f9254c4a44f --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/codestarts/quarkus/singleton-examples/google-cloud-functions-http-example/java/src/main/java/org/acme/googlecloudfunctions/GreetingServlet.java @@ -0,0 +1,27 @@ +package org.acme.googlecloudfunctionshttp; + +import java.io.IOException; + +import javax.servlet.ServletException; +import javax.servlet.annotation.WebServlet; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +@WebServlet(name = "ServletGreeting", urlPatterns = "/servlet/hello") +public class GreetingServlet extends HttpServlet { + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + resp.setStatus(200); + resp.addHeader("Content-Type", "text/plain"); + resp.getWriter().write("hello"); + } + + @Override + protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + String name = req.getReader().readLine(); + resp.setStatus(200); + resp.addHeader("Content-Type", "text/plain"); + resp.getWriter().write("hello " + name); + } +} diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/gradle.properties-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/gradle.properties-template.ftl index ae1d466f986bb..c89e173e9ad97 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/gradle.properties-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/gradle.properties-template.ftl @@ -1,5 +1,5 @@ quarkusPlatformGroupId = ${bom_groupId} quarkusPlatformArtifactId = ${bom_artifactId} quarkusPlatformVersion = ${bom_version} -quarkusPluginVersion = ${plugin_version} +quarkusPluginVersion = ${gradle_plugin_version} org.gradle.logging.level=INFO diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/pom.xml-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/pom.xml-template.ftl index 064317fd23a62..6920a67e4132e 100644 --- 
a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/pom.xml-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/pom.xml-template.ftl @@ -15,7 +15,7 @@ ${bom_artifactId} ${bom_groupId} ${bom_version} - ${plugin_version} + ${maven_plugin_version} ${compiler_plugin_version} ${surefire_plugin_version} @@ -55,12 +55,15 @@ - ${plugin_groupId} - ${plugin_artifactId} + ${maven_plugin_groupId} + ${maven_plugin_artifactId} ${quarkus-plugin.version} + true + generate-code + generate-code-tests build diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/settings.gradle-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/settings.gradle-template.ftl index 6687873c9483b..61fd4debce633 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/settings.gradle-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/java/settings.gradle-template.ftl @@ -1,8 +1,8 @@ pluginManagement { repositories { - mavenLocal()${maven_plugin_repositories} + mavenLocal() mavenCentral() - gradlePluginPortal() + gradlePluginPortal()${maven_plugin_repositories} } plugins { id 'io.quarkus' version "${quarkusPluginVersion}" diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/gradle.properties-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/gradle.properties-template.ftl index ae1d466f986bb..c89e173e9ad97 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/gradle.properties-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/gradle.properties-template.ftl @@ -1,5 +1,5 @@ quarkusPlatformGroupId = ${bom_groupId} quarkusPlatformArtifactId = ${bom_artifactId} quarkusPlatformVersion = ${bom_version} -quarkusPluginVersion = ${plugin_version} +quarkusPluginVersion = ${gradle_plugin_version} org.gradle.logging.level=INFO diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/pom.xml-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/pom.xml-template.ftl index 1e9816b38147e..8132da629b747 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/pom.xml-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/pom.xml-template.ftl @@ -15,7 +15,7 @@ ${bom_artifactId} ${bom_groupId} ${bom_version} - ${plugin_version} + ${maven_plugin_version} ${compiler_plugin_version} ${surefire_plugin_version} ${kotlin_version} @@ -62,12 +62,14 @@ src/test/kotlin - ${plugin_groupId} - ${plugin_artifactId} + ${maven_plugin_groupId} + ${maven_plugin_artifactId} ${quarkus-plugin.version} + generate-code + generate-code-tests build diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/settings.gradle-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/settings.gradle-template.ftl index 6687873c9483b..61fd4debce633 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/settings.gradle-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/kotlin/settings.gradle-template.ftl @@ -1,8 +1,8 @@ pluginManagement { repositories { - 
mavenLocal()${maven_plugin_repositories} + mavenLocal() mavenCentral() - gradlePluginPortal() + gradlePluginPortal()${maven_plugin_repositories} } plugins { id 'io.quarkus' version "${quarkusPluginVersion}" diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/gradle.properties-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/gradle.properties-template.ftl index ae1d466f986bb..c89e173e9ad97 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/gradle.properties-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/gradle.properties-template.ftl @@ -1,5 +1,5 @@ quarkusPlatformGroupId = ${bom_groupId} quarkusPlatformArtifactId = ${bom_artifactId} quarkusPlatformVersion = ${bom_version} -quarkusPluginVersion = ${plugin_version} +quarkusPluginVersion = ${gradle_plugin_version} org.gradle.logging.level=INFO diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/pom.xml-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/pom.xml-template.ftl index 26faada923b7e..d0fa54ab61adf 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/pom.xml-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/pom.xml-template.ftl @@ -15,7 +15,7 @@ ${bom_artifactId} ${bom_groupId} ${bom_version} - ${plugin_version} + ${maven_plugin_version} ${compiler_plugin_version} ${surefire_plugin_version} ${scala_version} @@ -68,12 +68,14 @@ src/test/scala - ${plugin_groupId} - ${plugin_artifactId} + ${maven_plugin_groupId} + ${maven_plugin_artifactId} ${quarkus-plugin.version} + generate-code + generate-code-tests build diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/settings.gradle-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/settings.gradle-template.ftl index 6687873c9483b..61fd4debce633 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/settings.gradle-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/settings.gradle-template.ftl @@ -1,8 +1,8 @@ pluginManagement { repositories { - mavenLocal()${maven_plugin_repositories} + mavenLocal() mavenCentral() - gradlePluginPortal() + gradlePluginPortal()${maven_plugin_repositories} } plugins { id 'io.quarkus' version "${quarkusPluginVersion}" diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/test-resource-template.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/test-resource-template.ftl index 11246ffe8584d..eaceeebedaeff 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/test-resource-template.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/basic-rest/scala/test-resource-template.ftl @@ -9,7 +9,7 @@ import org.junit.jupiter.api.Test class ${class_name}Test { @Test - def testHelloEndpoint() = { + def testHelloEndpoint(): Unit = { given() .`when`().get("${path}") .then() diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-fast-jar.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-fast-jar.ftl deleted file mode 100644 index 832bc8b626197..0000000000000 --- 
a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-fast-jar.ftl +++ /dev/null @@ -1,57 +0,0 @@ -#### -# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode -# -# Before building the docker image run: -# -# mvn package -# -# Then, build the image with: -# -# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/${project_artifactId}-jvm . -# -# Then run the container using: -# -# docker run -i --rm -p 8080:8080 quarkus/${project_artifactId}-jvm -# -# If you want to include the debug port into your docker image -# you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5050 -# -# Then run the container using : -# -# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/${project_artifactId}-jvm -# -### -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.1 - -ARG JAVA_PACKAGE=java-11-openjdk-headless -ARG RUN_JAVA_VERSION=1.3.8 - -ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' - -# Install java and the run-java script -# Also set up permissions for user `1001` -RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ - && microdnf update \ - && microdnf clean all \ - && mkdir /deployments \ - && chown 1001 /deployments \ - && chmod "g+rwX" /deployments \ - && chown 1001:root /deployments \ - && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \ - && chown 1001 /deployments/run-java.sh \ - && chmod 540 /deployments/run-java.sh \ - && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/lib/security/java.security - -# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size. -ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" - -# We make four distinct layers so if there are application changes the library layers can be re-used -COPY --chown=1001 ${build_dir}/quarkus-app/lib/ /deployments/lib/ -COPY --chown=1001 ${build_dir}/quarkus-app/*.jar /deployments/ -COPY --chown=1001 ${build_dir}/quarkus-app/app/ /deployments/app/ -COPY --chown=1001 ${build_dir}/quarkus-app/quarkus/ /deployments/quarkus/ - -EXPOSE 8080 -USER 1001 - -ENTRYPOINT [ "/deployments/run-java.sh" ] diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-jvm.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-jvm.ftl index c74c433819b05..39a3e02582e03 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-jvm.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-jvm.ftl @@ -1,27 +1,27 @@ #### # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode # -# Before building the docker image run: +# Before building the container image run: # -# mvn package +# mvn package -Dquarkus.package.type=fast-jar # # Then, build the image with: # -# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/${project_artifactId}-jvm . +# docker build -f src/main/docker/Dockerfile.fast-jar -t quarkus/${project_artifactId}-fast-jar . 
# # Then run the container using: # -# docker run -i --rm -p 8080:8080 quarkus/${project_artifactId}-jvm +# docker run -i --rm -p 8080:8080 quarkus/${project_artifactId}-fast-jar # # If you want to include the debug port into your docker image # you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5050 # # Then run the container using : # -# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/${project_artifactId}-jvm +# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/${project_artifactId}-fast-jar # ### -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.1 +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 ARG JAVA_PACKAGE=java-11-openjdk-headless ARG RUN_JAVA_VERSION=1.3.8 @@ -45,8 +45,11 @@ RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size. ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" -COPY ${build_dir}/lib/* /deployments/lib/ -COPY ${build_dir}/*-runner.jar /deployments/app.jar +# We make four distinct layers so if there are application changes the library layers can be re-used +COPY --chown=1001 ${build_dir}/quarkus-app/lib/ /deployments/lib/ +COPY --chown=1001 ${build_dir}/quarkus-app/*.jar /deployments/ +COPY --chown=1001 ${build_dir}/quarkus-app/app/ /deployments/app/ +COPY --chown=1001 ${build_dir}/quarkus-app/quarkus/ /deployments/quarkus/ EXPOSE 8080 USER 1001 diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-legacy-jar.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-legacy-jar.ftl new file mode 100644 index 0000000000000..704c5073d52cb --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-legacy-jar.ftl @@ -0,0 +1,54 @@ +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode +# +# Before building the container image run: +# +# mvn package +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/${project_artifactId}-jvm . 
+# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/${project_artifactId}-jvm +# +# If you want to include the debug port into your docker image +# you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5050 +# +# Then run the container using : +# +# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/${project_artifactId}-jvm +# +### +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 + +ARG JAVA_PACKAGE=java-11-openjdk-headless +ARG RUN_JAVA_VERSION=1.3.8 + +ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' + +# Install java and the run-java script +# Also set up permissions for user `1001` +RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ + && microdnf update \ + && microdnf clean all \ + && mkdir /deployments \ + && chown 1001 /deployments \ + && chmod "g+rwX" /deployments \ + && chown 1001:root /deployments \ + && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \ + && chown 1001 /deployments/run-java.sh \ + && chmod 540 /deployments/run-java.sh \ + && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/lib/security/java.security + +# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size. +ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" + +COPY ${build_dir}/lib/* /deployments/lib/ +COPY ${build_dir}/*-runner.jar /deployments/app.jar + +EXPOSE 8080 +USER 1001 + +ENTRYPOINT [ "/deployments/run-java.sh" ] diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native-distroless.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native-distroless.ftl new file mode 100644 index 0000000000000..4ba08903ff84a --- /dev/null +++ b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native-distroless.ftl @@ -0,0 +1,23 @@ +#### +# This Dockerfile is used in order to build a distroless container that runs the Quarkus application in native (no JVM) mode +# +# Before building the container image run: +# +# mvn package -Pnative -Dquarkus.native.container-build=true +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.native-distroless -t quarkus/${project_artifactId} . 
+# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/${project_artifactId} +# +### +FROM quay.io/quarkus/quarkus-distroless-image:1.0 +COPY ${build-dir}/*-runner /application + +EXPOSE 8080 +USER nonroot + +CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] \ No newline at end of file diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native.ftl index 4e3a77944bd67..a9fe968e96d48 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/dockerfile-native.ftl @@ -1,7 +1,7 @@ #### # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode # -# Before building the docker image run: +# Before building the container image run: # # mvn package -Pnative -Dquarkus.native.container-build=true # @@ -14,7 +14,7 @@ # docker run -i --rm -p 8080:8080 quarkus/${project_artifactId} # ### -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.1 +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 WORKDIR /work/ RUN chown 1001 /work \ && chmod "g+rwX" /work \ diff --git a/devtools/platform-descriptor-json/src/main/resources/templates/gitignore.ftl b/devtools/platform-descriptor-json/src/main/resources/templates/gitignore.ftl index 667fae3d7021e..38db6efa816ad 100644 --- a/devtools/platform-descriptor-json/src/main/resources/templates/gitignore.ftl +++ b/devtools/platform-descriptor-json/src/main/resources/templates/gitignore.ftl @@ -27,4 +27,8 @@ nb-configuration.xml # patch *.orig *.rej + +# Local environment +.env + ${additional_gitignore_entries} diff --git a/devtools/platform-descriptor-json/src/test/java/io/quarkus/platform/descriptor/tests/PlatformDescriptorLoaderTest.java b/devtools/platform-descriptor-json/src/test/java/io/quarkus/platform/descriptor/tests/PlatformDescriptorLoaderTest.java deleted file mode 100644 index d3cab409c8266..0000000000000 --- a/devtools/platform-descriptor-json/src/test/java/io/quarkus/platform/descriptor/tests/PlatformDescriptorLoaderTest.java +++ /dev/null @@ -1,76 +0,0 @@ -package io.quarkus.platform.descriptor.tests; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Path; -import java.util.Collections; -import java.util.List; -import java.util.function.Function; - -import org.apache.maven.model.Dependency; -import org.junit.jupiter.api.Test; - -import io.quarkus.platform.descriptor.loader.json.ArtifactResolver; -import io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoader; -import io.quarkus.platform.descriptor.loader.json.QuarkusJsonPlatformDescriptorLoaderContext; -import io.quarkus.platform.descriptor.loader.json.impl.QuarkusJsonPlatformDescriptor; -import io.quarkus.platform.descriptor.loader.json.impl.QuarkusJsonPlatformDescriptorLoaderImpl; - -class PlatformDescriptorLoaderTest { - - @Test - void test() { - - QuarkusJsonPlatformDescriptorLoader qpd = new QuarkusJsonPlatformDescriptorLoaderImpl(); - - final ArtifactResolver artifactResolver = new ArtifactResolver() { - - @Override - public T process(String groupId, String artifactId, String classifier, String type, String version, - Function processor) { - throw new UnsupportedOperationException(); - } - - @Override - 
public List getManagedDependencies(String groupId, String artifactId, String classifier, - String type, String version) { - return Collections.emptyList(); - } - }; - - QuarkusJsonPlatformDescriptorLoaderContext context = new QuarkusJsonPlatformDescriptorLoaderContext(artifactResolver) { - @Override - public T parseJson(Function parser) { - String resourceName = "fakeextensions.json"; - - final InputStream is = getClass().getClassLoader().getResourceAsStream(resourceName); - if (is == null) { - throw new IllegalStateException("Failed to locate " + resourceName + " on the classpath"); - } - - try { - return parser.apply(is); - } finally { - try { - is.close(); - } catch (IOException e) { - } - } - } - - }; - - QuarkusJsonPlatformDescriptor load = qpd.load(context); - - assertNotNull(load); - - assertEquals(85, load.getExtensions().size()); - - assertEquals(1, load.getCategories().size()); - - } - -} diff --git a/devtools/platform-descriptor-json/src/test/resources/fakeextensions.json b/devtools/platform-descriptor-json/src/test/resources/fakeextensions.json deleted file mode 100644 index 9db862bcad79a..0000000000000 --- a/devtools/platform-descriptor-json/src/test/resources/fakeextensions.json +++ /dev/null @@ -1,1217 +0,0 @@ - -{ - "bom": { - "group-id": "io.quarkus", - "artifact-id": "quarkus-bom", - "version": "999-SNAPSHOT" - }, - - "categories": [ - { - "name" : "Web", - "id" : "web", - "description": "web is webby", - "metadata": { - "pinned": [ - "x.y.z", - "1.2.3" - ] - } - } - ], - - "extensions": [ - { - "name": "Quarkus - Core", - "metadata": { - "keywords": [ - ] - }, - "short-name": "wonka", - "group-id": "io.quarkus", - "artifact-id": "quarkus-core", - "version": "999-SNAPSHOT", - "description": "Build parent to bring in required dependencies" - }, - { - "name": "ArC", - "short-name": "CDI", - "guide": "https://quarkus.io/guides/cdi-reference", - "metadata": { - "keywords": [ - "arc", - "cdi", - "dependency-injection", - "di" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-arc", - "version": "999-SNAPSHOT", - "description": "Build time CDI dependency injection" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-caffeine", - "version": "999-SNAPSHOT", - "name": "Quarkus - Caffeine - Runtime", - "description": "A high performance caching library for Java 8+" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jaxb", - "version": "999-SNAPSHOT", - "name": "Quarkus - JAXB - Runtime", - "description": "XML serialization support" - }, - { - "name": "Jackson", - "metadata": { - "keywords": [ - "jackson", - "json" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jackson", - "version": "999-SNAPSHOT", - "description": "Jackson Databind support" - }, - { - "name": "JSON-B", - "guide": "https://quarkus.io/guides/rest-json", - "metadata": { - "keywords": [ - "jsonb", - "json-b", - "json" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jsonb", - "version": "999-SNAPSHOT", - "description": "JSON Binding support" - }, - { - "name": "JSON-P", - "metadata": { - "keywords": [ - "jsonp", - "json-p", - "json" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jsonp", - "version": "999-SNAPSHOT", - "description": "JSON Processing support" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-netty", - "version": "999-SNAPSHOT", - "name": "Quarkus - Netty - Runtime", - "description": "Netty is a non-blocking I/O client-server framework. 
Used by Quarkus as foundation layer." - }, - { - "name": "Agroal - Database connection pool", - "metadata": { - "keywords": [ - "agroal", - "database-connection-pool" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-agroal", - "version": "999-SNAPSHOT", - "description": "Pool your database connections (included in Hibernate ORM)" - }, - { - "name": "Artemis Core", - "metadata": { - "keywords": [ - "artemis-core", - "artemis" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-artemis-core", - "version": "999-SNAPSHOT", - "description": "Use ActiveMQ Artemis as message broker" - }, - { - "name": "Artemis JMS", - "metadata": { - "keywords": [ - "artemis-jms", - "artemis" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-artemis-jms", - "version": "999-SNAPSHOT", - "description": "Use ActiveMQ Artemis as a JMS implementation" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-elasticsearch-rest-client", - "version": "999-SNAPSHOT", - "name": "Quarkus - Elasticsearch REST client - Runtime", - "description": "Elasticsearch REST client" - }, - { - "name": "Jaeger", - "metadata": { - "keywords": [ - "jaeger" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-security", - "version": "999-SNAPSHOT", - "description": "Security" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-elytron-security", - "version": "999-SNAPSHOT", - "name": "Quarkus - Elytron Security - Runtime", - "description": "Secure your services" - }, - { - "name": "Properties File based Security", - "guide": "https://quarkus.io/guides/security-properties", - "metadata": { - "keywords": [ - "security" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-elytron-security-properties-file", - "version": "999-SNAPSHOT", - "description": "Secure your applications using properties files" - }, - { - "name": "Elytron Security OAuth 2.0", - "metadata": { - "keywords": [ - "security", - "oauth2" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-elytron-security-oauth2", - "version": "999-SNAPSHOT", - "description": "Secure your applications with OAuth2 opaque tokens" - }, - { - "name": "OpenID Connect", - "guide": "https://quarkus.io/guides/security-openid-connect", - "metadata": { - "keywords": [ - "oauth2", - "openid-connect" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-oidc", - "version": "999-SNAPSHOT", - "description": "Secure your applications with OpenID Connect and Keycloak" - }, - { - "name": "Flyway", - "guide": "https://quarkus.io/guides/flyway", - "metadata": { - "keywords": [ - "flyway", - "database", - "data" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-flyway", - "version": "999-SNAPSHOT", - "description": "Handle your database schema migrations" - }, - { - "name": "Hibernate ORM", - "short-name": "JPA", - "guide": "https://quarkus.io/guides/hibernate-orm", - "metadata": { - "keywords": [ - "hibernate-orm", - "jpa", - "hibernate" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-hibernate-orm", - "version": "999-SNAPSHOT", - "description": "Define your persistent model with Hibernate ORM and JPA" - }, - { - "name": "Hibernate ORM with Panache", - "guide": "https://quarkus.io/guides/hibernate-orm-panache", - "metadata": { - "keywords": [ - "hibernate-orm-panache", - "panache", - "hibernate", - "jpa" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-hibernate-orm-panache", - "version": "999-SNAPSHOT", - "description": "Define your 
persistent model in Hibernate ORM with Panache" - }, - { - "name": "MongoDB with Panache", - "metadata": { - "keywords": [ - "mongo", - "mongodb", - "nosql", - "datastore", - "panache" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-mongodb-panache", - "version": "999-SNAPSHOT", - "description": "Use an active record or repository pattern with MongoDB" - }, - { - "name": "Hibernate Search + Elasticsearch", - "guide": "https://quarkus.io/guides/hibernate-search-elasticsearch", - "metadata": { - "keywords": [ - "hibernate-search-elasticsearch", - "search", - "full-text", - "hibernate", - "elasticsearch" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-hibernate-search-elasticsearch", - "version": "999-SNAPSHOT", - "description": "Automatically index your Hibernate entities in Elasticsearch" - }, - { - "name": "Hibernate Validator", - "short-name": "bean validation", - "guide": "https://quarkus.io/guides/validation", - "metadata": { - "keywords": [ - "hibernate-validator", - "bean-validation", - "validation" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-hibernate-validator", - "version": "999-SNAPSHOT", - "description": "Validate data coming to your REST endpoints" - }, - { - "name": "Infinispan Client", - "guide": "https://quarkus.io/guides/infinispan-client", - "metadata": { - "keywords": [ - "infinispan-client", - "data-grid-client", - "infinispan" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-infinispan-client", - "version": "999-SNAPSHOT", - "description": "Connect to the Infinispan data grid for distributed caching" - }, - { - "name": "Infinispan Embedded", - "metadata": { - "keywords": [ - "infinispan" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-infinispan-embedded", - "version": "999-SNAPSHOT", - "description": "Run an embedded Infinispan data grid server for distributed caching" - }, - { - "name": "Security", - "metadata": { - "keywords": [ - "security" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jaeger", - "version": "999-SNAPSHOT", - "description": "Trace your services with Jaeger" - }, - { - "name": "JDBC Driver - PostgreSQL", - "metadata": { - "keywords": [ - "jdbc-postgresql", - "jdbc", - "postgresql" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jdbc-postgresql", - "version": "999-SNAPSHOT", - "description": "PostgreSQL database connector" - }, - { - "name": "JDBC Driver - H2", - "metadata": { - "keywords": [ - "jdbc-h2", - "jdbc", - "h2" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jdbc-h2", - "version": "999-SNAPSHOT", - "description": "H2 database connector" - }, - { - "name": "JDBC Driver - MariaDB", - "metadata": { - "keywords": [ - "jdbc-mariadb", - "jdbc", - "mariadb" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jdbc-mariadb", - "version": "999-SNAPSHOT", - "description": "MariaDB database connector" - }, - { - "name": "JDBC Driver - Microsoft SQL Server", - "metadata": { - "keywords": [ - "jdbc-mssql", - "jdbc", - "mssql", - "sql-server" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jdbc-mssql", - "version": "999-SNAPSHOT", - "description": "Microsoft SQL Server database connector" - }, - { - "name": "JDBC Driver - MySQL", - "metadata": { - "keywords": [ - "jdbc-mysql", - "jdbc", - "mysql" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jdbc-mysql", - "version": "999-SNAPSHOT", - "description": "MySQL database connector" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - 
"artifact-id": "quarkus-jdbc-derby", - "version": "999-SNAPSHOT", - "name": "Quarkus - JDBC - Derby - Runtime", - "description": "Derby database connector" - }, - { - "name": "Apache Kafka Client", - "metadata": { - "keywords": [ - "kafka" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-kafka-client", - "version": "999-SNAPSHOT", - "description": "A client for Apache Kafka" - }, - { - "name": "Apache Kafka Streams", - "metadata": { - "keywords": [ - "kafka", - "kafka-streams" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-kafka-streams", - "version": "999-SNAPSHOT", - "description": "Implement stream processing applications based on Apache Kafka" - }, - { - "name": "SmallRye Health", - "short-name": "health", - "guide": "https://quarkus.io/guides/microprofile-health", - "metadata": { - "keywords": [ - "smallrye-health", - "health-check", - "health", - "microprofile-health", - "microprofile-health-check" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-health", - "version": "999-SNAPSHOT", - "description": "Monitor service health" - }, - { - "name": "SmallRye JWT", - "guide": "https://quarkus.io/guides/security-jwt", - "metadata": { - "keywords": [ - "smallrye-jwt", - "jwt", - "json-web-token", - "rbac" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-jwt", - "version": "999-SNAPSHOT", - "description": "Secure your applications with JSON Web Token" - }, - { - "name": "SmallRye Context Propagation", - "short-name": "context propagation", - "metadata": { - "keywords": [ - "smallrye-context-propagation", - "microprofile-context-propagation", - "context-propagation", - "context", - "reactive" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-context-propagation", - "version": "999-SNAPSHOT", - "description": "SmallRye Context Propagation" - }, - { - "name": "SmallRye Reactive Streams Operators", - "short-name": "reactive streams", - "metadata": { - "keywords": [ - "smallrye-reactive-streams-operators", - "smallrye-reactive-streams", - "reactive-streams-operators", - "reactive-streams", - "microprofile-reactive-streams", - "reactive" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-reactive-streams-operators", - "version": "999-SNAPSHOT", - "description": "Operators for Reactive Streams programming" - }, - { - "name": "SmallRye Reactive Type Converters", - "metadata": { - "keywords": [ - "smallrye-reactive-type-converters", - "reactive-type-converters", - "reactive-streams-operators", - "reactive-streams", - "microprofile-reactive-streams", - "reactive" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-reactive-type-converters", - "version": "999-SNAPSHOT", - "description": "Converters for reactive types from various reactive programming libraries" - }, - { - "name": "SmallRye Reactive Messaging", - "guide": "https://quarkus.io/guides/reactive-messaging", - "metadata": { - "keywords": [ - "smallrye-reactive-messaging", - "reactive-messaging", - "reactive" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-reactive-messaging", - "version": "999-SNAPSHOT", - "description": "Asynchronous messaging for Reactive Streams" - }, - { - "name": "SmallRye Reactive Messaging - Kafka Connector", - "short-name": "kafka", - "guide": "https://quarkus.io/guides/kafka", - "metadata": { - "keywords": [ - "kafka", - "reactive-kafka" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-reactive-messaging-kafka", - "version": 
"999-SNAPSHOT", - "description": "Kafka reactive messaging connector" - }, - { - "name": "SmallRye Reactive Messaging - AMQP Connector", - "guide": "https://quarkus.io/guides/amqp", - "metadata": { - "keywords": [ - "amqp", - "reactive-amqp" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-reactive-messaging-amqp", - "version": "999-SNAPSHOT", - "description": "AMQP reactive messaging connector" - }, - { - "name": "SmallRye Reactive Messaging - MQTT Connector", - "metadata": { - "keywords": [ - "mqtt", - "reactive-mqtt" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-reactive-messaging-mqtt", - "version": "999-SNAPSHOT", - "description": "MQTT reactive messaging connector" - }, - { - "name": "SmallRye Metrics", - "short-name": "metrics", - "guide": "https://quarkus.io/guides/microprofile-metrics", - "metadata": { - "keywords": [ - "smallrye-metrics", - "metrics", - "metric", - "prometheus", - "monitoring" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-metrics", - "version": "999-SNAPSHOT", - "description": "Extract metrics out of your services" - }, - { - "name": "SmallRye OpenAPI", - "guide": "https://quarkus.io/guides/openapi-swaggerui", - "metadata": { - "keywords": [ - "smallrye-openapi", - "openapi", - "open-api" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-openapi", - "version": "999-SNAPSHOT", - "description": "Document your REST APIs with OpenAPI - comes with Swagger UI" - }, - { - "name": "SmallRye OpenTracing", - "guide": "https://quarkus.io/guides/opentracing", - "metadata": { - "keywords": [ - "smallrye-opentracing", - "opentracing", - "tracing", - "distributed-tracing", - "jaeger" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-opentracing", - "version": "999-SNAPSHOT", - "description": "Trace your services with Jaeger" - }, - { - "name": "REST Client", - "guide": "https://quarkus.io/guides/rest-client", - "metadata": { - "keywords": [ - "rest-client", - "web-client", - "microprofile-rest-client" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-rest-client", - "version": "999-SNAPSHOT", - "description": "Call REST services" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-resteasy-common", - "version": "999-SNAPSHOT", - "name": "Quarkus - RESTEasy - Common - Runtime", - "description": "REST framework implementing JAX-RS and more" - }, - { - "name": "RESTEasy JAX-RS", - "short-name": "jax-rs", - "guide": "https://quarkus.io/guides/rest-json", - "metadata": { - "keywords": [ - "resteasy", - "jaxrs", - "web", - "rest" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-resteasy", - "version": "999-SNAPSHOT", - "description": "REST framework implementing JAX-RS and more" - }, - { - "name": "RESTEasy Jackson", - "metadata": { - "keywords": [ - "resteasy-jackson", - "jaxrs-json", - "resteasy-json", - "resteasy", - "jaxrs", - "json", - "jackson" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-resteasy-jackson", - "version": "999-SNAPSHOT", - "description": "Jackson serialization support for RESTEasy" - }, - { - "name": "RESTEasy JSON-B", - "guide": "https://quarkus.io/guides/rest-json", - "metadata": { - "keywords": [ - "resteasy-jsonb", - "jaxrs-json", - "resteasy-json", - "resteasy", - "jaxrs", - "json", - "jsonb" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-resteasy-jsonb", - "version": "999-SNAPSHOT", - "description": "JSON-B serialization support for RESTEasy" - }, - 
{ - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-resteasy-jaxb", - "version": "999-SNAPSHOT", - "name": "Quarkus - RESTEasy - JAXB - Runtime", - "description": "XML serialization support for RESTEasy" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-resteasy-server-common", - "version": "999-SNAPSHOT", - "name": "Quarkus - RESTEasy - Server common - Runtime", - "description": "RESTEasy Server common" - }, - { - "name": "Narayana JTA - Transaction manager", - "guide": "https://quarkus.io/guides/transaction", - "metadata": { - "keywords": [ - "narayana-jta", - "narayana", - "jta", - "transactions", - "transaction", - "tx", - "txs" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-narayana-jta", - "version": "999-SNAPSHOT", - "description": "JTA transaction support (included in Hibernate ORM)" - }, - { - "name": "Undertow Servlet", - "short-name": "servlet", - "metadata": { - "keywords": [ - "undertow", - "servlet" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-undertow", - "version": "999-SNAPSHOT", - "description": "Support for servlets" - }, - { - "name": "SmallRye Fault Tolerance", - "metadata": { - "keywords": [ - "smallrye-fault-tolerance", - "fault-tolerance", - "microprofile-fault-tolerance", - "circuit-breaker", - "bulkhead" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-smallrye-fault-tolerance", - "version": "999-SNAPSHOT", - "description": "Define fault-tolerant services" - }, - { - "name": "Eclipse Vert.x - Core", - "metadata": { - "keywords": [ - "eclipse-vert.x", - "vertx", - "vert.x", - "reactive" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-vertx-core", - "version": "999-SNAPSHOT", - "description": "Vert.x Core" - }, - { - "name": "Eclipse Vert.x", - "guide": "https://quarkus.io/guides/vertx", - "metadata": { - "keywords": [ - "eclipse-vert.x", - "vertx", - "vert.x", - "reactive" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-vertx", - "version": "999-SNAPSHOT", - "description": "Reactive application toolkit" - }, - { - "name": "Eclipse Vert.x - HTTP", - "metadata": { - "keywords": [ - "eclipse-vert.x", - "vertx", - "vert.x", - "reactive", - "vertx-http" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-vertx-http", - "version": "999-SNAPSHOT", - "description": "Vert.x HTTP" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-vertx-web", - "version": "999-SNAPSHOT", - "name": "Quarkus - Vert.x Web - Runtime", - "description": "Vert.x Web" - }, - { - "name": "Reactive PostgreSQL client", - "metadata": { - "keywords": [ - "eclipse-vert.x", - "vertx", - "vert.x", - "reactive", - "database", - "data", - "postgresql" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-reactive-pg-client", - "version": "999-SNAPSHOT", - "description": "A reactive client for the PostgreSQL database" - }, - { - "name": "Reactive MySQL client", - "metadata": { - "keywords": [ - "eclipse-vert.x", - "vertx", - "vert.x", - "reactive", - "database", - "data", - "mysql" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-reactive-mysql-client", - "version": "999-SNAPSHOT", - "description": "A reactive client for the MySQL database" - }, - { - "name": "Mailer", - "guide": "https://quarkus.io/guides/mailer", - "metadata": { - "keywords": [ - "mail", - "mailer" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-mailer", - "version": "999-SNAPSHOT", - "description": "Send emails" - }, - { - 
"name": "MongoDB client", - "metadata": { - "keywords": [ - "mongo", - "mongodb", - "nosql", - "datastore" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-mongodb-client", - "version": "999-SNAPSHOT", - "description": "An imperative and reactive client for MongoDB" - }, - { - "name": "Undertow WebSockets", - "short-name": "websockets", - "guide": "https://quarkus.io/guides/websockets", - "metadata": { - "keywords": [ - "undertow-websockets", - "undertow-websocket", - "websocket", - "websockets", - "web-socket", - "web-sockets" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-undertow-websockets", - "version": "999-SNAPSHOT", - "description": "WebSocket support" - }, - { - "name": "Scheduler - tasks", - "guide": "https://quarkus.io/guides/scheduler", - "metadata": { - "keywords": [ - "scheduler", - "tasks", - "periodic-tasks" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-scheduler", - "version": "999-SNAPSHOT", - "description": "Schedule jobs and tasks" - }, - { - "name": "Quarkus Extension for Spring DI API", - "guide": "https://quarkus.io/guides/spring-di", - "metadata": { - "keywords": [ - "spring-di" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-spring-di", - "version": "999-SNAPSHOT", - "description": "Define your dependency injection with Spring DI" - }, - { - "name": "Quarkus Extension for Spring Web API", - "guide": "https://quarkus.io/guides/spring-web", - "metadata": { - "keywords": [ - "spring-web" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-spring-web", - "version": "999-SNAPSHOT", - "description": "Use Spring Web annotations to create your REST services" - }, - { - "name": "Quarkus Extension for Spring Data JPA API", - "guide": "https://quarkus.io/guides/spring-data-jpa", - "metadata": { - "keywords": [ - "spring-data" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-spring-data-jpa", - "version": "999-SNAPSHOT", - "description": "Use Spring Data JPA annotations to create your data access layer" - }, - { - "name": "Swagger UI", - "guide": "https://quarkus.io/guides/openapi-swaggerui", - "metadata": { - "keywords": [ - "swagger-ui" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-swagger-ui", - "version": "999-SNAPSHOT", - "description": "Swagger UI" - }, - { - "name": "Kotlin", - "guide": "https://quarkus.io/guides/kotlin", - "metadata": { - "keywords": [ - "kotlin" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-kotlin", - "version": "999-SNAPSHOT", - "description": "Write your services in Kotlin" - }, - { - "name": "AWS Lambda", - "metadata": { - "keywords": [ - "lambda", - "aws", - "amazon" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-amazon-lambda", - "version": "999-SNAPSHOT", - "description": "AWS Lambda support" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-amazon-lambda-http", - "version": "999-SNAPSHOT", - "name": "Quarkus - Amazon Lambda HTTP - Runtime", - "description": "Allows Java applications written for a servlet container to run in AWS Lambda" - }, - { - "name": "Amazon DynamoDB client", - "metadata": { - "keywords": [ - "dynamodb", - "dynamo", - "aws", - "amazon" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-amazon-dynamodb", - "version": "999-SNAPSHOT", - "description": "A client for the Amazon DynamoDB datastore" - }, - { - "metadata": { - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-azure-functions-http", - "version": "999-SNAPSHOT", - "name": "Quarkus 
- HTTP Azure Functions - Runtime", - "description": "This package contains all Java interfaces and annotations to interact with Microsoft Azure functions runtime." - }, - { - "name": "Kubernetes", - "guide": "https://quarkus.io/guides/kubernetes", - "metadata": { - "keywords": [ - "kubernetes" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-kubernetes", - "version": "999-SNAPSHOT", - "description": "Generate Kubernetes resources from annotations" - }, - { - "name": "Kubernetes Client", - "metadata": { - "keywords": [ - "kubernetes-client" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-kubernetes-client", - "version": "999-SNAPSHOT", - "description": "Interact with Kubernetes and develop Kubernetes Operators" - }, - { - "name": "Kogito", - "guide": "https://quarkus.io/guides/kogito", - "metadata": { - "keywords": [ - "kogito", - "drools", - "jbpm" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-kogito", - "version": "999-SNAPSHOT", - "description": "Add business automation capabilities with Kogito" - }, - { - "name": "Apache Tika", - "metadata": { - "keywords": [ - "tika", - "parser" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-tika", - "version": "999-SNAPSHOT", - "description": "Extract data from your documents with Apache Tika" - }, - { - "name": "Neo4j client", - "metadata": { - "keywords": [ - "neo4j", - "graph", - "nosql", - "datastore" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-neo4j", - "version": "999-SNAPSHOT", - "description": "A client for the Neo4j graph datastore" - }, - { - "name": "Scala", - "metadata": { - "keywords": [ - "scala" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-scala", - "version": "999-SNAPSHOT", - "description": "Write your services in Scala" - }, - { - "name": "JGit", - "metadata": { - "keywords": [ - "git" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-jgit", - "version": "999-SNAPSHOT", - "description": "Access your Git repositories" - }, - { - "name": "Narayana STM - Software Transactional Memory", - "guide": "https://quarkus.io/guides/software-transactional-memory", - "metadata": { - "keywords": [ - "narayana-stm", - "narayana", - "stm", - "transactions", - "transaction", - "software-transactional-memory", - "tx", - "txs" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-narayana-stm", - "version": "999-SNAPSHOT", - "description": "Software Transactional Memory (stm) support" - }, - { - "name": "Elytron Security JDBC Realm", - "guide": "https://quarkus.io/guides/security-jdbc", - "metadata": { - "keywords": [ - "security", - "jdbc" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-elytron-security-jdbc", - "version": "999-SNAPSHOT", - "description": "Secure your applications with username/password stored in a database" - }, - { - "name": "Vault", - "guide": "https://quarkus.io/guides/vault", - "metadata": { - "keywords": [ - "vault", - "security" - ] - }, - "group-id": "io.quarkus", - "artifact-id": "quarkus-vault", - "version": "999-SNAPSHOT", - "description": "Store your credentials securely in HashiCorp Vault" - } - ] -} diff --git a/devtools/platform-properties/pom.xml b/devtools/platform-properties/pom.xml new file mode 100644 index 0000000000000..22e6f06e3b14c --- /dev/null +++ b/devtools/platform-properties/pom.xml @@ -0,0 +1,57 @@ + + + + quarkus-devtools-all + io.quarkus + 999-SNAPSHOT + + 4.0.0 + + quarkus-bom-quarkus-platform-properties + pom + Quarkus - Platform Properties + + + + + + + + 
src/main/resources + true + + + + + org.apache.maven.plugins + maven-resources-plugin + + + + process-resources + + resources + + + + + + io.quarkus + quarkus-platform-descriptor-json-plugin + ${project.version} + + + process-resources + + platform-properties + + + + + + + + + diff --git a/devtools/platform-properties/src/main/resources/platform-properties.properties b/devtools/platform-properties/src/main/resources/platform-properties.properties new file mode 100644 index 0000000000000..ad52b8fbb7599 --- /dev/null +++ b/devtools/platform-properties/src/main/resources/platform-properties.properties @@ -0,0 +1 @@ +platform.quarkus.native.builder-image=${platform.quarkus.native.builder-image} \ No newline at end of file diff --git a/devtools/pom.xml b/devtools/pom.xml index a86b4ffbc56f1..ec3cd0a47327e 100644 --- a/devtools/pom.xml +++ b/devtools/pom.xml @@ -21,11 +21,12 @@ platform-descriptor-json-plugin + platform-properties bom-descriptor-json platform-descriptor-json maven gradle - aesh + cli reflection-agent
    diff --git a/docs/pom.xml b/docs/pom.xml index 31ca10cb3411f..fabc2a5f77a64 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -38,8 +38,12 @@
    io.quarkus - quarkus-bootstrap-core + quarkus-devtools-registry-client ${project.version} + + + io.quarkus + quarkus-bootstrap-core org.slf4j @@ -51,10 +55,2428 @@ - - - + + + + io.quarkus + quarkus-agroal-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-alexa-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-dynamodb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-iam-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-kms-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-http-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-rest-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-lambda-xray-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-s3-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-ses-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-sns-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-amazon-sqs-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-arc-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-artemis-core-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-artemis-jms-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-avro-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-azure-functions-http-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-cache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-caffeine-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-config-yaml-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-consul-config-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-docker-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-jib-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-openshift-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-container-image-s2i-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-core-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-datasource-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elasticsearch-rest-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elasticsearch-rest-client-common-deployment + ${project.version} + pom + test + + + * 
+ * + + + + + io.quarkus + quarkus-elasticsearch-rest-high-level-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-jdbc-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-ldap-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-oauth2-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-elytron-security-properties-file-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-flyway-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-amazon-lambda-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-google-cloud-functions-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-http-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-knative-events-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-funqy-server-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-google-cloud-functions-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-google-cloud-functions-http-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-grpc-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-grpc-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-envers-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-panache-kotlin-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-orm-rest-data-panache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-reactive-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-reactive-panache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-reactive-panache-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-search-orm-elasticsearch-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-hibernate-validator-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-infinispan-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jackson-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jacoco-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaeger-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaxb-deployment + 
${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaxp-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jaxrs-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-db2-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-derby-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-h2-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-mariadb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-mssql-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-mysql-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jdbc-postgresql-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jgit-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jsch-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jsonb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-jsonp-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kafka-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kafka-streams-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-keycloak-admin-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-keycloak-authorization-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kotlin-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-config-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-kubernetes-service-binding-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-liquibase-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-logging-gelf-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-logging-json-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-logging-sentry-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mailer-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-micrometer-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-micrometer-registry-prometheus-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-minikube-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-panache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-panache-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mongodb-panache-kotlin-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + 
quarkus-mongodb-rest-data-panache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mutiny-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-mutiny-reactive-streams-operators-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-narayana-jta-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-narayana-stm-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-neo4j-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-netty-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-client-filter-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-oidc-token-propagation-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-openshift-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-openshift-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-picocli-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-quartz-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-qute-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-datasource-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-db2-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-messaging-http-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-mysql-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-reactive-pg-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-redis-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-jackson-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-jaxb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-jsonb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-mutiny-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-rest-client-reactive-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-jackson-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-jaxb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-jsonb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-multipart-deployment + 
${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-mutiny-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-mutiny-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-qute-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-jackson-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-jsonb-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-qute-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-reactive-servlet-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-resteasy-server-common-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-scala-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-scheduler-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-security-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-security-jpa-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-context-propagation-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-fault-tolerance-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-graphql-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-health-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-jwt-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-jwt-build-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-metrics-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-openapi-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-opentracing-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-amqp-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-kafka-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-messaging-mqtt-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-streams-operators-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-smallrye-reactive-type-converters-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-boot-properties-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-cache-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-cloud-config-client-deployment + ${project.version} + pom + test + + + * + * + + 
+ + + io.quarkus + quarkus-spring-data-jpa-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-data-rest-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-di-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-scheduled-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-security-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-spring-web-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-swagger-ui-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-tika-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-undertow-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vault-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-core-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-graphql-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-http-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-vertx-web-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-webjars-locator-deployment + ${project.version} + pom + test + + + * + * + + + + + io.quarkus + quarkus-websockets-deployment + ${project.version} + pom + test + + + * + * + + + + + + + + + + dev.jbang + jbang-maven-plugin + 0.0.6 + + + generate-build-item-doc + process-resources + + run + + + 0.56.0 + + + --outputFile ${project.basedir}/../target/asciidoc/generated/config/quarkus-all-build-items.adoc + --dirs ${project.basedir}/../core/deployment ${project.basedir}/../core/test-extension ${project.basedir}/../extensions + + + + + org.codehaus.mojo exec-maven-plugin @@ -71,7 +2493,7 @@ io.quarkus.docs.generation.AllConfigGenerator ${project.version} - ${project.basedir}/../devtools/bom-descriptor-json/target/extensions.json + ${project.basedir}/../devtools/platform-descriptor-json/target/classes/quarkus-bom-descriptor/extensions.json ${env.MAVEN_CMD_LINE_ARGS} @@ -108,6 +2530,8 @@ ${version.surefire.plugin} ${graal-sdk.version-for-documentation} ${graal-sdk.version-for-documentation}-java11 + ${mandrel.version-for-documentation} + ${mandrel.version-for-documentation}-java11 ${rest-assured.version} ${proposed-maven-version} ${gradle-wrapper.version} @@ -138,12 +2562,12 @@ ${quarkus-base-url} ${quarkus-base-url}.git - - ${quarkus-base-url}/archive/master.zip - - ${quarkus-base-url}/blob/master - - ${quarkus-base-url}/tree/master + + ${quarkus-base-url}/archive/main.zip + + ${quarkus-base-url}/blob/main + + ${quarkus-base-url}/tree/main ${quarkus-base-url}/issues https://github.com/quarkusio/quarkus-images/tree @@ -153,12 +2577,12 @@ ${quickstarts-base-url} ${quickstarts-base-url}.git - - ${quickstarts-base-url}/archive/master.zip - - ${quickstarts-base-url}/blob/master - - ${quickstarts-base-url}/tree/master + + ${quickstarts-base-url}/archive/main.zip + + ${quickstarts-base-url}/blob/main + + ${quickstarts-base-url}/tree/main diff --git a/docs/src/main/asciidoc/README.adoc b/docs/src/main/asciidoc/README.adoc index 5fcc914ac1923..9410e5aa8f6a2 100644 --- a/docs/src/main/asciidoc/README.adoc +++ 
b/docs/src/main/asciidoc/README.adoc @@ -28,9 +28,9 @@ complete list of externalized variables for use is given in the following table: |\{quarkus-org-url}|{quarkus-org-url}| The location of the project github organization. |\{quarkus-base-url}|{quarkus-base-url}| Quarkus GitHub URL common base prefix. |\{quarkus-clone-url}|{quarkus-clone-url}| Quarkus URL for git clone referenced by the documentation. -|\{quarkus-archive-url}|{quarkus-archive-url}| Quarkus URL to master source archive. -|\{quarkus-blob-url}|{quarkus-blob-url}| Quarkus URL to master blob source tree; used for referencing source files. -|\{quarkus-tree-url}|{quarkus-tree-url}| Quarkus URL to master source tree root; used for referencing directories. +|\{quarkus-archive-url}|{quarkus-archive-url}| Quarkus URL to main source archive. +|\{quarkus-blob-url}|{quarkus-blob-url}| Quarkus URL to main blob source tree; used for referencing source files. +|\{quarkus-tree-url}|{quarkus-tree-url}| Quarkus URL to main source tree root; used for referencing directories. |\{quarkus-issues-url}|{quarkus-issues-url}| Quarkus URL to the issues page. |\{quarkus-images-url}|{quarkus-images-url}| Quarkus URL to set of container images delivered for Quarkus. @@ -40,10 +40,10 @@ complete list of externalized variables for use is given in the following table: |\{quickstarts-base-url}|{quickstarts-base-url}| Quickstarts URL common base prefix. |\{quickstarts-clone-url}|{quickstarts-clone-url}| Quickstarts URL for git clone referenced by the documentation. -|\{quickstarts-archive-url}|{quickstarts-archive-url}| Quickstarts URL to master source archive. -|\{quickstarts-blob-url}|{quickstarts-blob-url}| Quickstarts URL to master blob source tree; used for referencing source files. -|\{quickstarts-tree-url}|{quickstarts-tree-url}| Quickstarts URL to master source tree root; used for referencing directories. +|\{quickstarts-archive-url}|{quickstarts-archive-url}| Quickstarts URL to main source archive. +|\{quickstarts-blob-url}|{quickstarts-blob-url}| Quickstarts URL to main blob source tree; used for referencing source files. +|\{quickstarts-tree-url}|{quickstarts-tree-url}| Quickstarts URL to main source tree root; used for referencing directories. |\{graalvm-version}|{graalvm-version}| Recommended GraalVM version to use. -|\{graalvm-flavor}|{graalvm-flavor}| The full flavor of GraaVM to use e.g. `19.3.1-java11`. +|\{graalvm-flavor}|{graalvm-flavor}| The full flavor of GraalVM to use e.g. `19.3.1-java11`. Make sure to use a `java11` version.
|=== diff --git a/docs/src/main/asciidoc/all-builditems.adoc b/docs/src/main/asciidoc/all-builditems.adoc new file mode 100644 index 0000000000000..4522902f6585b --- /dev/null +++ b/docs/src/main/asciidoc/all-builditems.adoc @@ -0,0 +1,15 @@ +--- +layout: guides-configuration-reference +--- +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// +include::./attributes.adoc[] + += Quarkus - Build Items + +Here you can find a list of Build Items and the extension that provides them: + +include::{generated-dir}/config/quarkus-all-build-items.adoc[opts=optional] diff --git a/docs/src/main/asciidoc/all-config.adoc b/docs/src/main/asciidoc/all-config.adoc index 39a3868d1e9b4..97750a0a177fd 100644 --- a/docs/src/main/asciidoc/all-config.adoc +++ b/docs/src/main/asciidoc/all-config.adoc @@ -4,7 +4,7 @@ layout: guides-configuration-reference //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - All configuration options diff --git a/docs/src/main/asciidoc/amazon-dynamodb.adoc b/docs/src/main/asciidoc/amazon-dynamodb.adoc index 275bb4742fc0a..236d9504d433a 100644 --- a/docs/src/main/asciidoc/amazon-dynamodb.adoc +++ b/docs/src/main/asciidoc/amazon-dynamodb.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon DynamoDB Client :extension-status: preview @@ -40,7 +40,7 @@ To complete this guide, you need: The easiest way to start working with DynamoDB is to run a local instance as a container. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- docker run --publish 8000:8000 amazon/dynamodb-local:1.11.477 -jar DynamoDBLocal.jar -inMemory -sharedDb ---- @@ -76,7 +76,7 @@ For more information, see https://docs.aws.amazon.com/amazondynamodb/latest/deve We recommend to use the AWS CLI to provision the table: -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- aws dynamodb create-table --table-name QuarkusFruits \ --attribute-definitions AttributeName=fruitName,AttributeType=S \ @@ -98,14 +98,14 @@ The solution is located in the `amazon-dynamodb-quickstart` {quickstarts-tree-ur First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amazon-dynamodb-quickstart \ -DclassName="org.acme.dynamodb.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jsonb,amazon-dynamodb,resteasy-mutiny" + -Dextensions="resteasy,resteasy-jackson,amazon-dynamodb,resteasy-mutiny" cd amazon-dynamodb-quickstart ---- @@ -281,17 +281,13 @@ package org.acme.dynamodb; import java.util.List; import javax.inject.Inject; -import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; @Path("/fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { @Inject @@ -321,7 +317,7 @@ The implementation is pretty straightforward and you just need to define your en == Configuring DynamoDB clients Both DynamoDB clients (sync and async) are configurable via the `application.properties` file that can be provided in the `src/main/resources` directory. -Additionally, you need to added to the classpath a proper implementation of the sync client. By default the extension uses URL connection HTTP client, so +Additionally, you need to add to the classpath a proper implementation of the sync client. By default the extension uses the `java.net.URLConnection` HTTP client, so you need to add a URL connection client dependency to the `pom.xml` file: [source,xml] @@ -332,7 +328,7 @@ you need to add a URL connection client dependency to the `pom.xml` file:
    ---- -If you want to use Apache HTTP client instead, configure it as follows: +If you want to use the Apache HTTP client instead, configure it as follows: [source,properties] ---- quarkus.dynamodb.sync-client.type=apache @@ -380,7 +376,7 @@ include::./amazon-credentials.adoc[] === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -jar target/amazon-dynamodb-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. diff --git a/docs/src/main/asciidoc/amazon-iam.adoc b/docs/src/main/asciidoc/amazon-iam.adoc new file mode 100644 index 0000000000000..2e22751b228ee --- /dev/null +++ b/docs/src/main/asciidoc/amazon-iam.adoc @@ -0,0 +1,25 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Amazon IAM Client +:extension-status: preview + +include::./attributes.adoc[] + +Amazon Identity and Access Management (IAM) enables users to manage access to AWS services and resources securely. + +You can find more information about IAM at https://aws.amazon.com/iam/[the Amazon IAM website]. + +NOTE: The IAM extension is based on https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/welcome.html[AWS Java SDK 2.x]. +It's a major rewrite of the 1.x code base that offers two programming models (Blocking & Async). + +include::./status-include.adoc[] + +The Quarkus extension supports two programming models: + +* Blocking access using URL Connection HTTP client (by default) or the Apache HTTP Client +* https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/basics-async.html[Asynchronous programming] based on JDK's `CompletableFuture` objects and the Netty HTTP client. + +include::{generated-dir}/config/quarkus-amazon-iam.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/amazon-kms.adoc b/docs/src/main/asciidoc/amazon-kms.adoc index 4cff8c7673b6a..1cb3f0a8f37ae 100644 --- a/docs/src/main/asciidoc/amazon-kms.adoc +++ b/docs/src/main/asciidoc/amazon-kms.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon KMS Client :extension-status: preview @@ -39,9 +39,9 @@ To complete this guide, you need: The easiest way to start working with KMS is to run a local instance as a container. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- -docker run --rm --name local-kms 8011:4599 -e SERVICES=kms -e START_WEB=0 -d localstack/localstack:0.11.1 +docker run --rm --name local-kms --publish 8011:4599 -e SERVICES=kms -e START_WEB=0 -d localstack/localstack:0.11.1 ---- This starts a KMS instance that is accessible on port `8011`. @@ -59,18 +59,18 @@ Default output format [None]: Create a KMS master key queue using AWS CLI and store in `MASTER_KEY_ARN` environment variable. 
-[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- MASTER_KEY_ARN=`aws kms create-key --profile localstack --endpoint-url=http://localhost:8011 | cut -f3` ---- Generate a key data as 256-bit symmetric key (AES 256) -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- aws kms generate-data-key --key-id $MASTER_KEY_ARN --key-spec AES_256 --profile localstack --endpoint-url=http://localhost:8011 ---- Or, if you want to use your AWS account create a key using your default profile -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- MASTER_KEY_ARN=`aws kms create-key | cut -f3` aws kms generate-data-key --key-id $MASTER_KEY_ARN --key-spec AES_256 @@ -90,14 +90,14 @@ The solution is located in the `amazon-kms-quickstart` {quickstarts-tree-url}/am First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amazon-kms-quickstart \ -DclassName="org.acme.kms.QuarkusKmsSyncResource" \ -Dpath="/sync" \ - -Dextensions="resteasy-jsonb,amazon-kms,resteasy-mutiny" + -Dextensions="resteasy,resteasy-jackson,amazon-kms,resteasy-mutiny" cd amazon-kms-quickstart ---- @@ -216,7 +216,7 @@ And the region from your AWS CLI profile will be used. === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -Dkey.arn=$MASTER_KEY_ARN -jar target/amazon-kms-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -Dkey.arn=$MASTER_KEY_ARN -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. diff --git a/docs/src/main/asciidoc/amazon-lambda-http.adoc b/docs/src/main/asciidoc/amazon-lambda-http.adoc index b21896dc2b461..8fcf6ec8e4f9a 100644 --- a/docs/src/main/asciidoc/amazon-lambda-http.adoc +++ b/docs/src/main/asciidoc/amazon-lambda-http.adoc @@ -1,19 +1,24 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// -= Quarkus - Amazon Lambda with RESTEasy, Undertow, or Vert.x Web  += Quarkus - Amazon Lambda with RESTEasy, Undertow, or Vert.x Web :extension-status: preview include::./attributes.adoc[] -The `quarkus-amazon-lambda-http` extension allows you to write microservices with RESTEasy (JAX-RS), -Undertow (servlet), Vert.x Web, or link:funqy-http[Funqy HTTP] and make these microservices deployable as an Amazon Lambda -using https://aws.amazon.com/api-gateway/[Amazon's API Gateway] and https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html[Amazon's SAM framework]. +With Quarkus you can deploy your favorite Java HTTP frameworks as Amazon Lambda's using either the https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api.html[AWS Gateway HTTP API] +or https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-rest-api.html[AWS Gateway REST API]. This means that you can deploy your microservices written with RESTEasy (JAX-RS), +Undertow (servlet), Vert.x Web, link:funqy-http[Funqy HTTP] or any other Quarkus HTTP framework as an AWS Lambda. 
You can deploy your Lambda as a pure Java jar, or you can compile your project to a native image and deploy that for a smaller -memory footprint and startup time. +memory footprint and startup time. Our integration also generates SAM deployment files that can be consumed by https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html[Amazon's SAM framework]. + +Quarkus has a different extension for each Gateway API. The HTTP Gateway API is implemented within the `quarkus-amazon-lambda-http` extension. +The REST Gateway API is implemented within the `quarkus-amazon-lambda-rest` extension. If you are confused on which Gateway product to use, +Amazon has a https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-vs-rest.html[great guide] to help you navigate this decision. + include::./status-include.adoc[] @@ -41,8 +46,8 @@ for installing AWS SAM CLI. Create the Quarkus AWS Lambda maven project using our Maven Archetype. - -[source, subs=attributes+] +If you want to use the AWS Gateway HTTP API, generate your project with this script: +[source,bash,subs=attributes+] ---- mvn archetype:generate \ -DarchetypeGroupId=io.quarkus \ @@ -50,11 +55,21 @@ mvn archetype:generate \ -DarchetypeVersion={quarkus-version} ---- +If you want to use the AWS Gateway REST API, generate your project with this script: +[source,bash,subs=attributes+] +---- +mvn archetype:generate \ + -DarchetypeGroupId=io.quarkus \ + -DarchetypeArtifactId=quarkus-amazon-lambda-rest-archetype \ + -DarchetypeVersion={quarkus-version} +---- + + == Build and Deploy Build the project using maven. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean install ---- @@ -66,7 +81,7 @@ available with this extension. If you want to build for native too, make sure you have GraalVM installed correctly and just add a `native` property to the build -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean install -Dnative ---- @@ -75,14 +90,14 @@ NOTE: If you are building on a non-Linux system, you will need to also pass in a Lambda requires linux binaries. You can do this by passing this property to your Maven build: `-Dnative-image.docker-build=true`, or for Gradle: `--docker-build=true`. This requires you to have docker installed locally, however. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean install -Dnative -Dnative-image.docker-build=true ---- == Extra Build Generated Files -After you run the build, there are a few extra files generated by the `quarkus-amazon-lambda` extension. These files +After you run the build, there are a few extra files generated by the quarkus lambda extension you are using. These files are in the the build directory: `target/` for maven, `build/` for gradle. * `function.zip` - lambda deployment file @@ -94,7 +109,7 @@ are in the the build directory: `target/` for maven, `build/` for gradle. The AWS SAM CLI allows you to run your lambda's locally on your laptop in a simulated Lambda environment. This requires docker to be installed (see their install docs). After you have built your maven project, execute this command -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sam local start-api --template target/sam.jvm.yaml ---- @@ -107,126 +122,94 @@ http://127.0.0.1:3000/hello In the console you'll see startup messages from the lambda. This particular deployment starts a JVM and loads your lambda as pure Java. 
-If you want to deploy a native executable of your lambda, use a different yaml template that is provided in your -generated project: - -[source, subs=attributes+] ----- -sam local start-api --template target/sam.native.yaml ----- == Deploy to AWS -There are a few steps to get your lambda running on AWS. - -=== Package your deployment. - -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- -sam package --template-file target/sam.jvm.yaml --output-template-file packaged.yaml --s3-bucket +sam deploy -t target/sam.jvm.yaml -g ---- -Type the simple name of your S3 bucket you created during. If you've built a native executable, replace -`sam.jvm.yaml` with `sam.native.yaml`. +Answer all the questions and your lambda will be deployed and the necessary hooks to the API Gateway will be set up. If +everything deploys successfully, the root URL of your microservice will be output to the console. Something like this: -=== Deploy your package - -[source, subs=attributes+] ---- -sam deploy --template-file packaged.yaml --capabilities CAPABILITY_IAM --stack-name +Key LambdaHttpApi +Description URL for application +Value https://234asdf234as.execute-api.us-east-1.amazonaws.com/ ---- -The stack name can be anything you want. - -=== Debugging AWS Deployment Problems +The `Value` attribute is the root URL for your lambda. Copy it to your browser and add `hello` at the end. -If `sam deploy`, run the `describe-stack-events` command -to get information about your deployment and what happened. +[NOTE] +Responses for binary types will be automatically encoded with base64. This is different than the behavior using +`quarkus:dev` which will return the raw bytes. Amazon's API has additional restrictions requiring the base64 encoding. +In general, client code will automatically handle this encoding but in certain custom situations, you should be aware +you may need to manually manage that encoding. -[source, subs=attributes+] ----- -aws cloudformation describe-stack-events --stack-name ----- +== Deploying a native executable -One common issue that you may run across is that your S3 bucket has to be in the same region as Amazon Lambda. -Look for this error from `describe-stack-events` output: +To deploy a native executable, you must build it with Graal. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- -Error occurred while GetObject. S3 Error Code: AuthorizationHeaderMalformed. S3 Error Message: -The authorization header is malformed; the region 'us-east-1' is wrong; expecting 'us-east-2' -(Service: AWSLambdaInternal; Status Code: 400; Error Code: InvalidParameterValueException; -Request ID: aefcf978-ad2a-4b53-9ffe-cea3fcd0f868) +./mvnw clean install -Dnative ---- -The above error is stating that my S3 bucket should be in `us-east-2`, not `us-east-1`. -To fix this error you'll need to create an S3 bucket in that region and redo steps 1 and 2 from above. 
- -Another annoying this is that if there is an error in deployment, you also have to completely delete -it before trying to deploy again: +You can then test the executable locally with sam local -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- -aws cloudformation delete-stack --stack-name +sam local start-api --template target/sam.native.yaml ---- -== Execute your REST Lambda on AWS - -To get the root URL for your service, type the following command and see the following output: - -[source, subs=attributes+] +To deploy to AWS Lambda: +[source,bash,subs=attributes+] ---- -aws cloudformation describe-stacks --stack-name +sam deploy -t target/sam.native.yaml -g ---- -It should give you something like the following output: -[source, subs=attributes+] ---- -{ - "Stacks": [ - { - "StackId": "arn:aws:cloudformation:us-east-1:502833056128:stack/QuarkusNativeRestExample2/b35b0200-f685-11e9-aaa0-0e8cd4caae34", - "DriftInformation": { - "StackDriftStatus": "NOT_CHECKED" - }, - "Description": "AWS Serverless Quarkus HTTP - io.demo::rest-example", - "Tags": [], - "Outputs": [ - { - "Description": "URL for application", - "ExportName": "RestExampleNativeApi", - "OutputKey": "RestExampleNativeApi", - "OutputValue": "https://234234234.execute-api.us-east-1.amazonaws.com/Prod/" - } - ], ---- - -The `OutputValue` attribute is the root URL for your lambda. Copy it to your browser and add `hello` at the end. - -[NOTE] -Responses for binary types will be automatically encoded with base64. This is different than the behavior using -`quarkus:dev` which will return the raw bytes. Amazon's API has additional restrictions requiring the base64 encoding. -In general, client code will automatically handle this encoding but in certain custom situations, you should be aware -you may need to manually manage that encoding. - == Examine the POM There is nothing special about the POM other than the inclusion of the `quarkus-amazon-lambda-http` extension -as a dependencies. The extension automatically generates everything you might need for your lambda deployment. - -NOTE: In previous versions of this extension you had to set up your pom or gradle -to zip up your executable for native deployments, but this is not the case anymore. +(if you are deploying an AWS Gateway HTTP API) or the `quarkus-amazon-lambda-rest` extension (if you are deploying an AWS Gateway REST API). +These extensions automatically generate everything you might need for your lambda deployment. -Also, at least in the generated maven archetype `pom.xml`, the `quarkus-resteasy`, `quarkus-vertx-web`, and `quarkus-underdow` +Also, at least in the generated maven archetype `pom.xml`, the `quarkus-resteasy`, `quarkus-vertx-web`, and `quarkus-undertow` dependencies are all optional. Pick which http framework(s) you want to use (JAX-RS, Vertx Web, and/or Servlet) and remove the other dependencies to shrink your deployment. === Examine sam.yaml -The `sam.yaml` syntax is beyond the scope of this document. There's a couple of things to note though that are particular -to the `quarkus-amazon-lambda-http` extension. +The `sam.yaml` syntax is beyond the scope of this document. There's a couple of things that must be highlighted just in case you are +going to craft your own custom `sam.yaml` deployment files. + +The first thing to note is that pure Java lambda deployments require a specific handler class. +Do not change the Lambda handler name.
+ +[source, subs=attributes+] +---- + Properties: + Handler: io.quarkus.amazon.lambda.runtime.QuarkusStreamHandler::handleRequest + Runtime: java11 +---- + +This handler is a bridge between the lambda runtime and the Quarkus HTTP framework you are using (JAX-RS, Servlet, etc.) + +If you want to go native, there's an environment variable that must be set for native GraalVM deployments. If you look at `sam.native.yaml` +you'll see this: + +[source, subs=attributes+] +---- + Environment: + Variables: + DISABLE_SIGNAL_HANDLERS: true +---- + +This environment variable resolves some incompatibilities between Quarkus and the Amazon Lambda Custom Runtime environment. -Amazon's API Gateway assumes that HTTP response bodies are text unless you explicitly tell it which media types are +Finally, there is one specific thing for AWS Gateway REST API deployments. +That API assumes that HTTP response bodies are text unless you explicitly tell it which media types are binary through configuration. To make things easier, the Quarkus extension forces a binary (base 64) encoding of all HTTP response messages and the `sam.yaml` file must configure the API Gateway to assume all media types are binary: @@ -239,29 +222,54 @@ HTTP response messages and the `sam.yaml` file must configure the API Gateway to - "*/*" ---- -Another thing to note is that for pure Java lambda deployments, do not change the Lambda handler name. +== Injectable AWS Context Variables -[source, subs=attributes+] ----- - Properties: - Handler: io.quarkus.amazon.lambda.runtime.QuarkusStreamHandler::handleRequest - Runtime: java8 +If you are using Resteasy and JAX-RS, you can inject various AWS Context variables into your JAX-RS resource classes +using the JAX-RS `@Context` annotation. + +For the AWS HTTP API you can inject the AWS variables `com.amazonaws.services.lambda.runtime.Context` and +`com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPEvent`. Here is an example: + +[source, java] ---- +import javax.ws.rs.core.Context; +import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPEvent; -This particular handler handles all the intricacies of integrating with the Quarkus runtime. So you must use that -handler. -Finally, there's an environment variable that must be set for native GraalVM deployments. If you look at `sam.native.yaml` -you'll see this: +@Path("/myresource") +public class MyResource { + @GET + public String ctx(@Context com.amazonaws.services.lambda.runtime.Context ctx) { } -[source, subs=attributes+] + @GET + public String event(@Context APIGatewayV2HTTPEvent event) { } + + @GET + public String requestContext(@Context APIGatewayV2HTTPEvent.RequestContext req) { } + + +} ---- - Environment: - Variables: - DISABLE_SIGNAL_HANDLERS: true + +For the AWS REST API you can inject the AWS variables `com.amazonaws.services.lambda.runtime.Context` and +`io.quarkus.amazon.lambda.http.model.AwsProxyRequestContext`. Here is an example: + +[source, java] ---- +import javax.ws.rs.core.Context; +import io.quarkus.amazon.lambda.http.model.AwsProxyRequestContext; -This environment variable resolves some incompatibilities between Quarkus and the Amazon Lambda Custom Runtime environment. 
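+// Illustrative stub: the javax.ws.rs.GET and javax.ws.rs.Path imports and the
+// method bodies are omitted for brevity; only the @Context injection points are shown.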
+ +@Path("/myresource") +public class MyResource { + @GET + public String ctx(@Context com.amazonaws.services.lambda.runtime.Context ctx) { } + + @GET + public String req(@Context AwsProxyRequestContext req) { } + +} +---- == Tracing with AWS XRay and GraalVM diff --git a/docs/src/main/asciidoc/amazon-lambda.adoc b/docs/src/main/asciidoc/amazon-lambda.adoc index e7559206ba9e7..aad26db36ed2e 100644 --- a/docs/src/main/asciidoc/amazon-lambda.adoc +++ b/docs/src/main/asciidoc/amazon-lambda.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon Lambda :extension-status: preview @@ -43,7 +43,7 @@ for installing AWS CLI. Create the Quarkus AWS Lambda maven project using our Maven Archetype. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn archetype:generate \ -DarchetypeGroupId=io.quarkus \ @@ -79,7 +79,7 @@ project and use configuration or an environment variable to pick the handler you The generated project has three lambdas within it. Two that implement the `RequestHandler` interface, and one that implements the `RequestStreamHandler` interface. One that is used and two that are unused. If you open up `src/main/resources/application.properties` you'll see this: -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- quarkus.lambda.handler=test ---- @@ -89,7 +89,7 @@ with an environment variable too. If you look at the three generated handler classes in the project, you'll see that they are `@Named` differently. -[source, subs=attributes+] +[source,java,subs=attributes+] ---- @Named("test") public class TestLambda implements RequestHandler { @@ -116,7 +116,7 @@ create, update, delete, and invoke your lambdas for pure Java and native deploym Build the project using maven. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package ---- @@ -135,7 +135,7 @@ a lambda with AWS CLI. Specifically, make sure you have created an `Execution R a `LAMBDA_ROLE_ARN` environment variable in your profile or console window, Alternatively, you can edit the `manage.sh` script that is generated by the build and put the role value directly there: -[source] +[source,bash] ---- LAMBDA_ROLE_ARN="arn:aws:iam::1234567890:role/lambda-role" ---- @@ -146,7 +146,7 @@ After you run the build, there are a few extra files generated by the `quarkus-a are in the the build directory: `target/` for maven, `build/` for gradle. * `function.zip` - lambda deployment file -* `manage.sh` - wrapper around aws lamba cli calls +* `manage.sh` - wrapper around aws lambda cli calls * `bootstrap-example.sh` - example bootstrap script for native deployments * `sam.jvm.yaml` - (optional) for use with sam cli and local testing * `sam.native.yaml` - (optional) for use with sam cli and native local testing @@ -166,21 +166,21 @@ A usage statement will be printed to guide you accordingly. 
NOTE: If using Gradle, the path to the binaries in the `manage.sh` must be changed from `target` to `build` To see the `usage` statement, and validate AWS configuration: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh ---- You can `create` your function using the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh create ---- or if you do not have `LAMBDA_ROLE_ARN` already defined in this shell: -[source] +[source,bash] ---- LAMBDA_ROLE_ARN="arn:aws:iam::1234567890:role/lambda-role" sh target/manage.sh create ---- @@ -191,13 +191,13 @@ handler bootstraps Quarkus and wraps your actual handler so that injection can b If there are any problems creating the function, you must delete it with the `delete` function before re-running the `create` command. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh delete ---- Commands may also be stacked: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh delete create ---- @@ -206,7 +206,7 @@ sh target/manage.sh delete create Use the `invoke` command to invoke your function. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh invoke ---- @@ -216,14 +216,14 @@ in the root directory of the project. The lambda can also be invoked locally with the SAM CLI like this: -[source] +[source,bash] ---- sam local invoke --template target/sam.jvm.yaml --event payload.json ---- If you are working with your native image build, simply replace the template name with the native version: -[source] +[source,bash] ---- sam local invoke --template target/sam.native.yaml --event payload.json ---- @@ -233,7 +233,7 @@ sam local invoke --template target/sam.native.yaml --event payload.json You can update the Java code as you see fit. Once you've rebuilt, you can redeploy your lambda by executing the `update` command. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh update ---- @@ -245,13 +245,13 @@ code to a native executable. Just make sure to rebuild your project with the `- For Linux hosts execute: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn package -Pnative ---- or, if using Gradle: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./gradlew build -Dquarkus.package.type=native ---- @@ -260,13 +260,13 @@ NOTE: If you are building on a non-Linux system, you will need to also pass in a Lambda requires linux binaries. You can do this by passing this property to your Maven build: `-Dnative-image.docker-build=true`, or for Gradle: `--docker-build=true`. This requires you to have docker installed locally, however. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean install -Pnative -Dnative-image.docker-build=true ---- or, if using Gradle: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./gradlew build -Dquarkus.package.type=native -Dquarkus.native.container-build=true ---- @@ -278,7 +278,7 @@ Custom (Provided) Runtime. The instructions here are exactly as above with one change: you'll need to add `native` as the first parameter to the `manage.sh` script: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh native create ---- @@ -293,7 +293,7 @@ to create, delete, and update your lambdas. 
One thing to note about the create command for native is that the `aws lambda create-function` call must set a specific environment variable: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- --environment 'Variables={DISABLE_SIGNAL_HANDLERS=true}' ---- @@ -315,7 +315,7 @@ for your lambda deployment. Example Gradle dependencies: -[source, groovy] +[source,groovy] ---- dependencies { implementation enforcedPlatform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") @@ -340,7 +340,7 @@ The test replicates the execution environment, for the function that is selected To use the integration tests in your project there is a required property, in `src/test/resources/application.properties`. If not included, the integration tests will be in a constant loop. -[source,shell] +[source,properties] ---- quarkus.lambda.enable-polling-jvm-mode=true ---- @@ -433,14 +433,14 @@ The `event` parameter takes any JSON file, in this case the sample `payload.json NOTE: If using Gradle, the path to the binaries in the YAML templates must be changed from `target` to `build` -[source] +[source,bash] ---- sam local invoke --template target/sam.jvm.yaml --event payload.json ---- The native image can also be locally tested using the `sam.native.yaml` template: -[source] +[source,bash] ---- sam local invoke --template target/sam.native.yaml --event payload.json ---- @@ -471,7 +471,7 @@ you will need to include `quarkus-amazon-lambda-xray` as a dependency in your po library is not fully compatible with GraalVM so we had to do some integration work to make this work. In addition, remember to enable the AWS X-Ray tracing parameter in `manage.sh`, in the `cmd_create()` function. This can also be set in the AWS Management Console. -[source] +[source,bash] ---- --tracing-config Mode=Active ---- @@ -494,7 +494,7 @@ For further information, please consult the link:native-and-ssl[Quarkus SSL guid Open src/main/resources/application.properties and add the following line to enable SSL in your native image. -[source] +[source,properties] ---- quarkus.ssl.native=true ---- @@ -605,7 +605,7 @@ The native executable requires some additional steps to enable client ssl that S To do this, first create a directory `src/main/zip.native/` with your build. Next create a shell script file called `bootstrap` within `src/main/zip.native/`, like below. An example is create automatically in your build folder (target or build), called `bootstrap-example.sh` -[source, shell] +[source,bash] ---- #!/usr/bin/env bash @@ -618,7 +618,7 @@ Next you must copy some files from your GraalVM distribution into `src/main/zip. NOTE: GraalVM versions can have different paths for these files, and whether you using the Java 8 or 11 version. Adjust accordingly. -[source, shell] +[source,bash] ---- cp $GRAALVM_HOME/lib/libsunec.so $PROJECT_DIR/src/main/zip.native/ cp $GRAALVM_HOME/lib/security/cacerts $PROJECT_DIR/src/main/zip.native/ @@ -631,7 +631,7 @@ NOTE: If you are using a Docker image to build, then you must extract these file To extract the required ssl, you must start up a Docker container in the background, and attach to that container to copy the artifacts. First, let's start the GraalVM container, noting the container id output. 
-[source, shell,subs=attributes+] +[source,bash,subs=attributes+] ---- docker run -it -d --entrypoint bash quay.io/quarkus/ubi-quarkus-native-image:{graalvm-flavor} @@ -641,19 +641,19 @@ docker run -it -d --entrypoint bash quay.io/quarkus/ubi-quarkus-native-image:{gr First, libsunec.so, the C library used for the SSL implementation: -[source] +[source,bash] ---- -docker cp {container-id-from-above}:/opt/graalvm/jre/lib/amd64/libsunec.so src/main/zip.native/ +docker cp {container-id-from-above}:/opt/graalvm/lib/libsunec.so src/main/zip.native/ ---- Second, cacerts, the certificate store. You may need to periodically obtain an updated copy, also. -[source] +[source,bash] ---- -docker cp {container-id-from-above}:/opt/graalvm/jre/lib/security/cacerts src/main/zip.native/ +docker cp {container-id-from-above}:/opt/graalvm/lib/security/cacerts src/main/zip.native/ ---- Your final archive will look like this: -[source, shell] +[source,bash] ---- jar tvf target/function.zip @@ -667,7 +667,7 @@ jar tvf target/function.zip To use Alexa with Quarkus native, please add the following extension. -[source, xml] +[source,xml] ---- io.quarkus diff --git a/docs/src/main/asciidoc/amazon-s3.adoc b/docs/src/main/asciidoc/amazon-s3.adoc index 7356ec055f850..c7f313dd637ad 100644 --- a/docs/src/main/asciidoc/amazon-s3.adoc +++ b/docs/src/main/asciidoc/amazon-s3.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon S3 Client @@ -37,12 +37,15 @@ To complete this guide, you need: The easiest way to start working with S3 is to run a local instance as a container. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- -docker run -it --publish 8008:4572 -e SERVICES=s3 -e START_WEB=0 localstack/localstack +docker run -it --publish 8008:4566 -e SERVICES=s3 -e START_WEB=0 localstack/localstack:0.11.5 ---- This starts a S3 instance that is accessible on port `8008`. + +NOTE: Versions of localstack newer than v0.11.5 require port `4566` instead of port `4572`. See this https://github.com/localstack/localstack/issues/2983[GitHub issue] for details on this change. + Create an AWS profile for your local instance using AWS CLI: [source,shell,subs="verbatim,attributes"] ---- @@ -57,7 +60,7 @@ Default output format [None]: Create a S3 bucket using AWS CLI -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- aws s3 mb s3://quarkus.s3.quickstart --profile localstack --endpoint-url=http://localhost:8008 ---- @@ -77,14 +80,14 @@ The solution is located in the `amazon-s3-quickstart` {quickstarts-tree-url}/ama First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amazon-s3-quickstart \ -DclassName="org.acme.s3.S3SyncClientResource" \ -Dpath="/s3" \ - -Dextensions="resteasy-jsonb,amazon-s3" + -Dextensions="resteasy,resteasy-jackson,amazon-s3" cd amazon-s3-quickstart ---- @@ -96,11 +99,13 @@ Then, we'll add the following dependency to support `multipart/form-data` reques [source,xml] ---- - org.jboss.resteasy - resteasy-multipart-provider + io.quarkus + quarkus-resteasy-multipart ---- +NOTE: The default setting for `quarkus.http.limits.max-body-size` is 10240K. This may limit your ability to upload multipart files larger than the default. If you want to upload larger files, you will need to set this limit explicitly. + == Setting up the model In this example, we will create an application to manage a list of files. The example application will demonstrate the two programming models supported by the extension. @@ -329,7 +334,6 @@ public class S3SyncClientResource extends CommonResource { } @GET - @Produces(MediaType.APPLICATION_JSON) public List listFiles() { ListObjectsRequest listRequest = ListObjectsRequest.builder().bucket(bucketName).build(); @@ -424,7 +428,7 @@ You can now interact with your REST service: === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -jar target/amazon-s3-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. @@ -509,7 +513,6 @@ public class S3AsyncClientResource extends CommonResource { } @GET - @Produces(MediaType.APPLICATION_JSON) public Uni> listFiles() { ListObjectsRequest listRequest = ListObjectsRequest.builder() .bucket(bucketName) diff --git a/docs/src/main/asciidoc/amazon-ses.adoc b/docs/src/main/asciidoc/amazon-ses.adoc index d3a62a6064f2c..3d99ed022ef45 100644 --- a/docs/src/main/asciidoc/amazon-ses.adoc +++ b/docs/src/main/asciidoc/amazon-ses.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon SES Client :extension-status: preview @@ -38,7 +38,7 @@ To complete this guide, you need: The easiest way to start working with SES is to run a local instance as a container. However, local instance of SES is only mocks the SES APIs without the actual email sending capabilities. You can still use it for this guide to verify an API communication or integration test purposes. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- docker run --rm --name local-ses -p 8012:4579 -e SERVICES=ses -e START_WEB=0 -d localstack/localstack:0.11.1 ---- @@ -69,7 +69,7 @@ Going production, you'd need to get your account of the sandbox following the ht We assume you are going to use AWS SES sandbox for the sake of this guide. But before sending any email, you must verify sender and recipient email addresses using AWS CLI. You can use your personal email or any temporary email service available if you wish. 
-[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- aws ses verify-email-identity --email-address aws ses verify-email-identity --email-address @@ -79,7 +79,7 @@ Now, you need to open a mailboxes of those email addresses in order to follow co If you are using local SES you still need to verify email addresses, otherwise your send email in order to let local SES accepting your request. However, no emails to be send as it only mocks the service APIs. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- aws ses verify-email-identity --email-address --profile localstack --endpoint-url=http://localhost:8012 aws ses verify-email-identity --email-address --profile localstack --endpoint-url=http://localhost:8012 @@ -99,14 +99,14 @@ The solution is located in the `amazon-ses-quickstart` {quickstarts-tree-url}/am First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amazon-ses-quickstart \ -DclassName="org.acme.ses.QuarkusSesSyncResource" \ -Dpath="/sync" \ - -Dextensions="resteasy-jsonb,amazon-ses,resteasy-mutiny" + -Dextensions="resteasy,resteasy-jackson,amazon-ses,resteasy-mutiny" cd amazon-ses-quickstart ---- @@ -208,7 +208,7 @@ And the region from your AWS CLI profile will be used. === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -jar target/amazon-ses-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. diff --git a/docs/src/main/asciidoc/amazon-sns.adoc b/docs/src/main/asciidoc/amazon-sns.adoc index 5f1e9e03bf37b..c4bfbd6931c7a 100644 --- a/docs/src/main/asciidoc/amazon-sns.adoc +++ b/docs/src/main/asciidoc/amazon-sns.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon SNS Client :extension-status: preview @@ -41,7 +41,7 @@ To complete this guide, you need: The easiest way to start working with SNS is to run a local instance as a container. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- docker run -it --publish 8009:4575 -e SERVICES=sns -e START_WEB=0 localstack/localstack:0.11.1 ---- @@ -61,13 +61,13 @@ Default output format [None]: Create a SNS topic using AWS CLI and store in `TOPIC_ARN` environment variable -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- TOPIC_ARN=`aws sns create-topic --name=QuarksCollider --profile localstack --endpoint-url=http://localhost:8009` ---- If you want to run the demo using SNS on your AWS account, you can create a topic using AWS default profile -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- TOPIC_ARN=`aws sns create-topic --name=QuarksCollider` ---- @@ -87,14 +87,14 @@ The solution is located in the `amazon-sns-quickstart` {quickstarts-tree-url}/am First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amazon-sns-quickstart \ -DclassName="org.acme.sns.QuarksCannonSyncResource" \ -Dpath="/sync-cannon" \ - -Dextensions="resteasy-jsonb,amazon-sns,resteasy-mutiny" + -Dextensions="resteasy,resteasy-jackson,amazon-sns,resteasy-mutiny" cd amazon-sns-quickstart ---- @@ -646,7 +646,7 @@ include::./amazon-credentials.adoc[] === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -Dtopic.arn=$TOPIC_ARN -jar target/amazon-sns-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -Dtopic.arn=$TOPIC_ARN -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. diff --git a/docs/src/main/asciidoc/amazon-sqs.adoc b/docs/src/main/asciidoc/amazon-sqs.adoc index 3c2c8f295f2c0..2be94580abc44 100644 --- a/docs/src/main/asciidoc/amazon-sqs.adoc +++ b/docs/src/main/asciidoc/amazon-sqs.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Amazon SQS Client :extension-status: preview @@ -14,7 +14,7 @@ services to be available. SQS offers two types of message queues. Standard queues offer maximum throughput, best-effort ordering and at-least-once delivery. SQS FIFO queues are designed to guarantee that messages are processes exactly once, on the exact order that they were sent. -You can find more information about SQS at https://aws.amazon.com/sws/[the Amazon SQS website]. +You can find more information about SQS at https://aws.amazon.com/sqs/[the Amazon SQS website]. NOTE: The SQS extension is based on https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/welcome.html[AWS Java SDK 2.x]. It's a major rewrite of the 1.x code base that offers two programming models (Blocking & Async). @@ -42,7 +42,7 @@ To complete this guide, you need: The easiest way to start working with SQS is to run a local instance as a container. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- docker run --rm --name local-sqs -p 8010:4576 -e SERVICES=sqs -e START_WEB=0 -d localstack/localstack:0.11.1 ---- @@ -62,13 +62,13 @@ Default output format [None]: Create a SQS queue using AWS CLI and store in `QUEUE_URL` environment variable. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- QUEUE_URL=`aws sqs create-queue --queue-name=ColliderQueue --profile localstack --endpoint-url=http://localhost:8010` ---- Or, if you want to use your SQS queue on your AWS account create a queue using your default profile -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- QUEUE_URL=`aws sqs create-queue --queue-name=ColliderQueue` ---- @@ -88,14 +88,14 @@ The solution is located in the `amazon-sqs-quickstart` {quickstarts-tree-url}/am First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amazon-sqs-quickstart \ -DclassName="org.acme.sqs.QuarksCannonSyncResource" \ -Dpath="/sync-cannon" \ - -Dextensions="resteasy-jsonb,amazon-sqs,resteasy-mutiny" + -Dextensions="resteasy,resteasy-jackson,amazon-sqs,resteasy-mutiny" cd amazon-sqs-quickstart ---- @@ -242,8 +242,6 @@ public class QuarksShieldSyncResource { static ObjectReader QUARK_READER = new ObjectMapper().readerFor(Quark.class); @GET - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) public List receive() { List messages = sqs.receiveMessage(m -> m.maxNumberOfMessages(10).queueUrl(queueUrl)).messages(); @@ -325,7 +323,7 @@ And the region from your AWS CLI profile will be used. === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -Dqueue.url=$QUEUE_URL -jar target/amazon-sqs-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -Dqueue.url=$QUEUE_URL -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. @@ -423,8 +421,6 @@ public class QuarksShieldAsyncResource { static ObjectReader QUARK_READER = new ObjectMapper().readerFor(Quark.class); @GET - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) public Uni> receive() { return Uni.createFrom() .completionStage(sqs.receiveMessage(m -> m.maxNumberOfMessages(10).queueUrl(queueUrl))) diff --git a/docs/src/main/asciidoc/amqp.adoc b/docs/src/main/asciidoc/amqp.adoc index c234a5802699a..804ae4b19a49c 100644 --- a/docs/src/main/asciidoc/amqp.adoc +++ b/docs/src/main/asciidoc/amqp.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using AMQP with Reactive Messaging :extension-status: preview @@ -46,12 +46,13 @@ The solution is located in the `amqp-quickstart` {quickstarts-tree-url}/amqp-qui First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=amqp-quickstart \ - -Dextensions="amqp" + -Dextensions="amqp" \ + -DnoExamples cd amqp-quickstart ---- @@ -93,17 +94,17 @@ Create the `src/main/java/org/acme/amqp/PriceGenerator.java` file, with the foll ---- package org.acme.amqp; -import io.reactivex.Flowable; -import org.eclipse.microprofile.reactive.messaging.Outgoing; +import java.time.Duration; +import java.util.Random; import javax.enterprise.context.ApplicationScoped; -import java.util.Random; -import java.util.concurrent.TimeUnit; + +import io.smallrye.mutiny.Multi; +import org.eclipse.microprofile.reactive.messaging.Outgoing; /** * A bean producing random prices every 5 seconds. - * The prices are written to an AMQP queue (prices). The AMQP configuration is specified in the - * application configuration. + * The prices are written to a AMQP queue (prices). The AMQP configuration is specified in the application configuration. 
*/ @ApplicationScoped public class PriceGenerator { @@ -111,15 +112,17 @@ public class PriceGenerator { private Random random = new Random(); @Outgoing("generated-price") // <1> - public Flowable generate() { // <2> - return Flowable.interval(5, TimeUnit.SECONDS) + public Multi generate() { // <2> + return Multi.createFrom().ticks().every(Duration.ofSeconds(5)) + .onOverflow().drop() .map(tick -> random.nextInt(100)); } } + ---- <1> Instruct Reactive Messaging to dispatch the items from returned stream to `generated-price`. -<2> The method returns a RX Java 2 _stream_ (`Flowable`) emitting a random _price_ every 5 seconds. +<2> The method returns a Mutiny _stream_ (`Multi`) emitting a random _price_ every 5 seconds. The method returns a _Reactive Stream_. The generated items are sent to the stream named `generated-price`. This stream is mapped to an AMQP queue using the `application.properties` file that we will create soon. @@ -198,7 +201,6 @@ public class PriceResource { return "hello"; } - @GET @Path("/stream") @Produces(MediaType.SERVER_SENT_EVENTS) // <2> @@ -222,7 +224,7 @@ The `channel-name` segment must match the value set in the `@Incoming` and `@Out * `generated-price` -> sink in which we write the prices * `prices` -> source in which we read the prices -[source] +[source,properties] ---- # Configures the AMQP broker credentials. amqp-username=quarkus @@ -286,7 +288,7 @@ Nothing spectacular here. On each received price, it updates the page. If you followed the instructions, you should have the AMQP broker running. Then, you just need to run the application using: -[source, shell] +[source,bash] ---- ./mvnw quarkus:dev ---- @@ -299,7 +301,7 @@ NOTE: If you started the AMQP broker with docker compose, stop it using `CTRL+C` You can build the native executable with: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative ---- diff --git a/docs/src/main/asciidoc/azure-functions-http.adoc b/docs/src/main/asciidoc/azure-functions-http.adoc index 300fd138649f5..d5f3b3be77a6e 100644 --- a/docs/src/main/asciidoc/azure-functions-http.adoc +++ b/docs/src/main/asciidoc/azure-functions-http.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Azure Functions (Serverless) with RESTEasy, Undertow, or Vert.x Web :extension-status: preview @@ -15,6 +15,8 @@ One azure function deployment can represent any number of JAX-RS, servlet, Vert. include::./status-include.adoc[] +NOTE: Only text based media types are supported at the moment as Azure Functions HTTP Trigger for Java does not support a binary format + == Prerequisites To complete this guide, you need: @@ -36,7 +38,7 @@ to Azure. Create the azure maven project for your Quarkus application using our Maven Archetype. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn archetype:generate \ -DarchetypeGroupId=io.quarkus \ @@ -61,7 +63,7 @@ The values above are defined as properties in the generated `pom.xml` file. If you don't login to Azure you won't be able to deploy. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- az login ---- @@ -72,7 +74,7 @@ The `pom.xml` you generated in the previous step pulls in the `azure-functions-m generates config files and a staging directory required by the `azure-functions-maven-plugin`. Here's how to execute it. 
-[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean install azure-functions:deploy ---- diff --git a/docs/src/main/asciidoc/blaze-persistence.adoc b/docs/src/main/asciidoc/blaze-persistence.adoc index 16473ca8d5846..f77327baf4e1c 100644 --- a/docs/src/main/asciidoc/blaze-persistence.adoc +++ b/docs/src/main/asciidoc/blaze-persistence.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Blaze-Persistence @@ -22,7 +22,7 @@ include::./platform-include.adoc[] The extension comes with default producers for `CriteriaBuilderFactory` and `EntityViewManager` that work out of the box given a working Hibernate ORM configuration. For customization, overriding of the default producers is possible via the -standard mechanism as documented in the link:{quarkus-home-url}/guides/cdi-reference#default_beans[Quarkus CDI reference]. +standard mechanism as documented in the link:cdi-reference#default_beans[Quarkus CDI reference]. This is needed if you need to set custom link:https://persistence.blazebit.com/documentation/entity-view/manual/en_US/index.html#anchor-configuration-properties[Blaze-Persistence properties]. In Quarkus, you just need to: @@ -47,6 +47,11 @@ Add the following dependencies to your project: com.blazebit blaze-persistence-integration-quarkus + + com.blazebit + blaze-persistence-integration-hibernate-5.4 + runtime + ---- @@ -68,7 +73,7 @@ The use in native images requires a dependency on the entity view annotation pro ---- -A `CriteriaBuilderFactory` and an `EntityViewManager` will be created based on the configured `EntityManagerFactory` as provided by the link:{quarkus-home-url}/guides/hibernate-orm[Hibernate-ORM extension]. +A `CriteriaBuilderFactory` and an `EntityViewManager` will be created based on the configured `EntityManagerFactory` as provided by the link:hibernate-orm[Hibernate-ORM extension]. 
You can then access these beans via injection: @@ -87,7 +92,7 @@ public class SantaClausService { @Transactional <4> public List findAllGifts() { CriteriaBuilder cb = cbf.create(em, Gift.class); - return emv.applySetting(EntityViewSetting.create(GiftView.class), cb).getResultList(); + return evm.applySetting(EntityViewSetting.create(GiftView.class), cb).getResultList(); } } ---- @@ -172,8 +177,6 @@ public class GiftResource { SantaClausService santaClausService; @POST - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) @Transactional public Response createGift(GiftUpdateView view) { entityViewManager.save(entityManager, view); @@ -188,8 +191,6 @@ public class GiftResource { @PUT @Path("{id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) @Transactional public GiftView updateGift(@EntityViewId("id") GiftUpdateView view) { evm.save(em, view); diff --git a/docs/src/main/asciidoc/building-my-first-extension.adoc b/docs/src/main/asciidoc/building-my-first-extension.adoc index 404778a5438cf..680145a323e4b 100644 --- a/docs/src/main/asciidoc/building-my-first-extension.adoc +++ b/docs/src/main/asciidoc/building-my-first-extension.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Building my first extension @@ -18,7 +18,7 @@ The extension will expose a customizable HTTP endpoint which simply greets the v [NOTE] .Disclaimer To be sure it's extra clear you don't need an extension to add a Servlet to your application. -This guide is a simplified example to explain the concepts of extensions development. +This guide is a simplified example to explain the concepts of extensions development, see the link:writing-extensions[full documentation] if you need more information. Keep in mind it's not representative of the power of moving things to build time or simplifying the build of native images. == Prerequisites @@ -30,15 +30,6 @@ To complete this guide, you need: * JDK 1.8+ installed with `JAVA_HOME` configured appropriately * Apache Maven {maven-version} -== Solution - -We recommend that you follow the instructions in the next sections and create the extension step by step. -However, you can go right to the completed example. - -Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. - -The solution is located in the `getting-started-extension` {quickstarts-tree-url}/getting-started-extension[directory]. - == Basic Concepts First things first, we will need to start with some basic concepts. @@ -51,7 +42,7 @@ The operation of compiling Java bytecode into a native system-specific machine c * build time vs runtime in classic Java frameworks ** The build time corresponds to all the actions you apply to your Java source files to convert them into something runnable (class files, jar/war, native images). - Usually this stage is composed by the compilation, annotation processing, bytecode generation, etc. At this point, everything is under developer's scope and control. + Usually this stage is composed by the compilation, annotation processing, bytecode generation, etc. At this point, everything is under the developer's scope and control. ** The runtime is all the actions that happen when you execute your application. 
It's obviously focused on starting your business-oriented actions but it relies on a lot of technical actions like loading libraries and configuration files, scanning the application's classpath, configuring the dependency injection, setting up your Object-Relational Mapping, instantiating your REST controllers, etc. @@ -100,132 +91,174 @@ Now that everything is explained, we can start coding! Quarkus provides `create-extension` Maven Mojo to initialize your extension project. -[source, shell] +It will try to auto-detect its options: + +* from `quarkus` (Quarkus Core) or `quarkus/extensions` directory, it will use the 'Quarkus Core' extension layout and defaults. +* with `-DgroupId=io.quarkiverse.[extensionId]`, it will use the 'Quarkiverse' extension layout and defaults. +* in other cases it will use the 'Standalone' extension layout and defaults. +* we may introduce other layout types in the future. + +TIP: You may call it without any parameter to use the interactive mode: `mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create-extension -N` + +[source,shell,subs=attributes+] ---- $ mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create-extension -N \ -DgroupId=org.acme \ #<1> - -DartifactId=quarkus-greeting \ #<2> - -Dversion=1.0-SNAPSHOT \ #<3> - -Dquarkus.nameBase="Greeting Extension" #<4> -[INFO] Scanning for projects... -[INFO] -[INFO] ------------------< org.apache.maven:standalone-pom >------------------- -[INFO] Building Maven Stub Project (No POM) 1 -[INFO] --------------------------------[ pom ]--------------------------------- -[INFO] + -DextensionId=greeting-extension \ #<2> + -DwithoutTests #<3> + [INFO] --- quarkus-maven-plugin:{quarkus-version}:create-extension (default-cli) @ standalone-pom --- + +Detected layout type is 'standalone' #<4> +Generated runtime artifactId is 'greeting-extension' #<5> + + +applying codestarts... +🔠 java +🧰 maven +🗃 quarkus-extension +🐒 extension-base + +----------- +👍 extension has been successfully generated in: +--> /Users/ia3andy/workspace/redhat/quarkus/demo/greeting-extension +----------- [INFO] ------------------------------------------------------------------------ [INFO] BUILD SUCCESS [INFO] ------------------------------------------------------------------------ -[INFO] Total time: 1.233 s -[INFO] Finished at: 2020-04-22T23:28:15+02:00 +[INFO] Total time: 1.659 s +[INFO] Finished at: 2021-01-25T16:17:16+01:00 [INFO] ------------------------------------------------------------------------ + ---- -<1> Project's groupId -<2> artifactId for the runtime artifact of the extension (the deployment artifactId will be derived from the runtime artifactId by appending `-deployment`) -<3> Project's version -<4> Prefix for the `` element values in the generated POMs +<1> The extension groupId +<2> The extension id (not namespaced). +<3> Indicate that we don't want to generate any test +<4> From a directory with no pom.xml and without any further options, the generator will automatically pick the 'standalone' extension layout +<5> With the 'standalone' layout, the `namespaceId` is empty by default, so the computed runtime module artifactId is the `extensionId` -Maven has generated a `quarkus-greeting` directory containing the extension project which consists of the parent `pom.xml`, the `runtime` and the `deployment` modules. +Maven has generated a `greeting-extension` directory containing the extension project which consists of the parent `pom.xml`, the `runtime` and the `deployment` modules. 
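+
+For orientation, the generated layout looks roughly like this (a sketch based on the files discussed below; exact
+contents may vary with the plugin version):
+
+[source]
+----
+greeting-extension
+├── pom.xml
+├── deployment
+│   ├── pom.xml
+│   └── src/main/java/org/acme/greeting/extension/deployment/GreetingExtensionProcessor.java
+└── runtime
+    └── pom.xml
+----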
=== The parent pom.xml -Your extension is a multi-module project. So let's start by checking out the parent POM at `./quarkus-greeting/pom.xml`. +Your extension is a multi-module project. So let's start by checking out the parent POM at `./greeting-extension/pom.xml`. -[source, xml] +[source, xml, subs=attributes+] ---- - - 4.0.0 - - org.acme - quarkus-greeting-parent - 1.0-SNAPSHOT - Greeting Extension - Parent - - pom - - UTF-8 - UTF-8 - 1.8 - 1.8 - true - {quarkus-version} - 3.8.1 - - #<1> - deployment - runtime - - - - - io.quarkus - quarkus-bom-deployment #<2> - ${quarkus.version} - pom - import - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - ${compiler-plugin.version} #<3> - - - - + + 4.0.0 + org.acme + greeting-extension-parent + 1.0.0-SNAPSHOT + pom + Greeting Extension - Parent + + deployment + runtime + + + 3.8.1 + ${surefire-plugin.version} + true + 1.8 + 1.8 + UTF-8 + UTF-8 + {quarkus-version} + 2.22.2 + + + + + io.quarkus + quarkus-bom + ${quarkus.version} + pom + import + + + + + + + + maven-surefire-plugin + ${surefire-plugin.version} + + + org.jboss.logmanager.LogManager + ${maven.home} + ${settings.localRepository} + + + + + maven-failsafe-plugin + ${failsafe-plugin.version} + + + org.jboss.logmanager.LogManager + ${maven.home} + ${settings.localRepository} + + + + + maven-compiler-plugin + ${compiler-plugin.version} + + + + ---- <1> Your extension declares 2 sub-modules `deployment` and `runtime`. -<2> The `quarkus-bom-deployment` aligns your dependencies with those used by Quarkus during the augmentation phase. -<3> Quarkus requires a recent version of the Maven compiler plugin supporting the annotationProcessorPaths configuration. +<2> Quarkus requires a recent version of the Maven compiler plugin supporting the annotationProcessorPaths configuration. +<3> The `quarkus-bom` aligns your dependencies with those used by Quarkus during the augmentation phase. +<4> Quarkus requires these configs to run tests properly === The Deployment module -Let's have a look at the deployment's `./quarkus-greeting/deployment/pom.xml`. +Let's have a look at the deployment's `./greeting-extension/deployment/pom.xml`. [source, xml] ---- - + 4.0.0 org.acme - quarkus-greeting-parent - 1.0-SNAPSHOT - ../pom.xml + greeting-extension-parent + 1.0.0-SNAPSHOT - quarkus-greeting-deployment + greeting-extension-deployment Greeting Extension - Deployment - io.quarkus - quarkus-core-deployment - ${quarkus.version} + io.quarkus + quarkus-arc-deployment org.acme - quarkus-greeting + greeting-extension ${project.version} + + io.quarkus + quarkus-junit5-internal + test + - org.apache.maven.plugins maven-compiler-plugin @@ -245,24 +278,24 @@ Let's have a look at the deployment's `./quarkus-greeting/deployment/pom.xml`. The key points are: -<1> By convention, the deployment module has the `-deployment` suffix (`greeting-deployment`). -<2> The deployment module depends on the `quarkus-core-deployment` artifact. +<1> By convention, the deployment module has the `-deployment` suffix (`greeting-extension-deployment`). +<2> The deployment module depends on the `quarkus-arc-deployment` artifact. We will see later which dependencies are convenient to add. <3> The deployment module also *must* depend on the runtime module. <4> We add the `quarkus-extension-processor` to the compiler annotation processors. -In addition to the `pom.xml` `create-extension` also generated the `org.acme.quarkus.greeting.deployment.GreetingProcessor` class. 
+In addition to the `pom.xml` `create-extension` also generated the `org.acme.greeting.extension.deployment.GreetingExtensionProcessor` class. [source, java] ---- -package org.acme.quarkus.greeting.deployment; +package org.acme.greeting.extension.deployment; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.FeatureBuildItem; -class GreetingProcessor { +class GreetingExtensionProcessor { - private static final String FEATURE = "greeting"; + private static final String FEATURE = "greeting-extension"; @BuildStep FeatureBuildItem feature() { @@ -272,7 +305,7 @@ class GreetingProcessor { } ---- -NOTE: `FeatureBuildItem` represents a functionality provided by an extension. +NOTE: `FeatureBuildItem` represents a functionality provided by an extension. The name of the feature gets displayed in the log during application bootstrap. An extension should provide at most one feature. @@ -282,36 +315,39 @@ In other words, you are augmenting your application to use the `greeting` extens === The Runtime module -Finally `./quarkus-greeting/runtime/pom.xml`. +Finally `./greeting-extension/runtime/pom.xml`. [source, xml] ---- - + 4.0.0 org.acme - quarkus-greeting-parent - 1.0-SNAPSHOT - ../pom.xml + greeting-extension-parent + 0.0.1-snapshot - quarkus-greeting + greeting-extension Greeting Extension - Runtime + + io.quarkus + quarkus-arc + io.quarkus - quarkus-bootstrap-maven-plugin + quarkus-bootstrap-maven-plugin ${quarkus.version} + compile extension-descriptor @@ -330,7 +366,7 @@ Finally `./quarkus-greeting/runtime/pom.xml`. io.quarkus - quarkus-extension-processor + quarkus-extension-processor ${quarkus.version} @@ -343,9 +379,10 @@ Finally `./quarkus-greeting/runtime/pom.xml`. The key points are: -<1> By convention, the runtime module has no suffix (`greeting`) as it is the artifact exposed to the end user. -<2> We add the `quarkus-bootstrap-maven-plugin` to generate the Quarkus extension descriptor included into the runtime artifact which links it with the corresponding deployment artifact. -<3> We add the `quarkus-extension-processor` to the compiler annotation processors. +<1> By convention, the runtime module has no suffix (`greeting-extension`) as it is the artifact exposed to the end user. +<2> The runtime module depends on the `quarkus-arc` artifact. +<3> We add the `quarkus-bootstrap-maven-plugin` to generate the Quarkus extension descriptor included into the runtime artifact which links it with the corresponding deployment artifact. +<4> We add the `quarkus-extension-processor` to the compiler annotation processors. == Basic version of the Sample Greeting extension @@ -356,23 +393,30 @@ To do so, our extension will deploy, in the user application, a Servlet exposing The `runtime` module is where you develop the feature you want to propose to your users, so it's time to create our Web Servlet. To use Servlets in your applications you need to have a Servlet Container such as http://undertow.io[Undertow]. -Luckily, `quarkus-bom-deployment` imported by our parent `pom.xml` already includes the Undertow Quarkus extension. -All we need to do is add +Luckily, `quarkus-bom` imported by our parent `pom.xml` already includes the Undertow Quarkus extension. 
+ +All we need to do is add `quarkus-undertow` as dependency to our `./greeting-extension/runtime/pom.xml`: [source, xml] ---- - - - io.quarkus - quarkus-undertow - - + + io.quarkus + quarkus-undertow + +---- + +NOTE: The dependency on `quarkus-arc` generated by the `create-extension` mojo can now be removed since +`quarkus-undertow` already depends on it. + +Now we can create our Servlet `org.acme.greeting.extension.GreetingExtensionServlet` in the `runtime` module. + +[source,bash] +---- +mkdir -p ./greeting-extension/runtime/src/main/java/org/acme/greeting/extension ---- -to our `./quarkus-greeting/runtime/pom.xml`. -Now we can create our Servlet `org.acme.quarkus.greeting.GreetingServlet` in the `runtime` module. [source, java] ---- -package org.acme.quarkus.greeting; +package org.acme.greeting.extension; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; @@ -381,7 +425,7 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; @WebServlet -public class GreetingServlet extends HttpServlet { // <1> +public class GreetingExtensionServlet extends HttpServlet { // <1> @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { // <2> @@ -401,18 +445,21 @@ Thanks to basic concepts, you will describe the items to produce/consume and the The `io.quarkus.builder.item.BuildItem` concept represents object instances you will produce or consume (and at some point convert into bytecode) thanks to methods annotated with `@io.quarkus.deployment.annotations.BuildStep` which describe your extension's deployment tasks. -Go back to the generated `org.acme.quarkus.greeting.deployment.GreetingProcessor` class. +NOTE:: See link:all-builditems[the complete list of BuildItem implementations in core] for more information + + +Go back to the generated `org.acme.greeting.extension.deployment.GreetingExtensionProcessor` class. [source, java] ---- -package org.acme.quarkus.greeting.deployment; +package org.acme.greeting.extension.deployment; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.FeatureBuildItem; -class GreetingProcessor { +class GreetingExtensionProcessor { - private static final String FEATURE = "greeting"; + private static final String FEATURE = "greeting-extension"; @BuildStep // <1> FeatureBuildItem feature() { @@ -444,39 +491,31 @@ To create your `BuildItem` you can extend: It's now time to declare our HTTP endpoint. To do so, we need to produce a `ServletBuildItem`. At this point, we are sure you understood that if the `quarkus-undertow` dependency proposes Servlet support for our `runtime` module, we will need the `quarkus-undertow-deployment` dependency in our `deployment` module to have access to the `io.quarkus.undertow.deployment.ServletBuildItem`. -Update the `./quarkus-greeting/deployment/pom.xml` as follows: - +Let's add `quarkus-undertow-deployment` as dependency to our `./greeting-extension/deployment/pom.xml`: [source, xml] ---- - - - org.acme - quarkus-greeting - ${project.version} - - - io.quarkus - quarkus-undertow-deployment - - + + io.quarkus + quarkus-undertow-deployment + ---- -NOTE: The dependency on `quarkus-core-deployment` generated by the `create-extension` mojo can now be removed since +NOTE: The dependency on `quarkus-arc-deployment` generated by the `create-extension` mojo can now be removed since `quarkus-undertow-deployment` already depends on it. 
-We can now update `org.acme.quarkus.greeting.deployment.GreetingProcessor`: +We can now update `org.acme.greeting.extension.deployment.GreetingExtensionProcessor`: [source, java] ---- -package org.acme.quarkus.greeting.deployment; +package org.acme.greeting.extension.deployment; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.FeatureBuildItem; -import org.acme.quarkus.greeting.GreetingServlet; +import org.acme.greeting.extension.GreetingExtensionServlet; import io.quarkus.undertow.deployment.ServletBuildItem; -class GreetingProcessor { +class GreetingExtensionProcessor { - private static final String FEATURE = "greeting"; + private static final String FEATURE = "greeting-extension"; @BuildStep FeatureBuildItem feature() { @@ -485,7 +524,7 @@ class GreetingProcessor { @BuildStep ServletBuildItem createServlet() { // <1> - ServletBuildItem servletBuildItem = ServletBuildItem.builder("greeting", GreetingServlet.class.getName()) + ServletBuildItem servletBuildItem = ServletBuildItem.builder("greeting-extension", GreetingExtensionServlet.class.getName()) .addMapping("/greeting") .build(); // <2> return servletBuildItem; @@ -497,106 +536,40 @@ class GreetingProcessor { <1> We add a `createServlet` method which returns a `ServletBuildItem` and annotate it with `@BuildStep`. Now, Quarkus will process this new task which will result in the bytecode generation of the Servlet registration at build time. -<2> `ServletBuildItem` proposes a fluent API to instantiate a Servlet named `greeting` of type `GreetingServlet` (it's our class provided by our extension `runtime` module), and map it the `/greeting` path. +<2> `ServletBuildItem` proposes a fluent API to instantiate a Servlet named `greeting-extension` of type `GreetingExtensionServlet` (it's our class provided by our extension `runtime` module), and map it the `/greeting` path. -=== Testing the Greeting feature +=== Testing the Greeting Extension feature When developing a Quarkus extension, you mainly want to test your feature is properly deployed in an application and works as expected. That's why the tests will be hosted in the `deployment` module. -Let's add the testing dependencies into the `./quarkus-greeting/deployment/pom.xml` and `maven-surefire` configuration +Quarkus proposes facilities to test extensions via the `quarkus-junit5-internal` artifact (which should already be in the deployment pom.xml), in particular the `io.quarkus.test.QuarkusUnitTest` runner which starts an application with your extension. + +We will use http://rest-assured.io[RestAssured] (massively used in Quarkus) to test our HTTP endpoint. +Let's add the `rest-assured` dependency into the `./greeting-extension/deployment/pom.xml`. [source, xml] ---- - - - 4.0.0 - - org.acme - quarkus-greeting-parent - 1.0-SNAPSHOT - ../pom.xml - - - quarkus-greeting-deployment - Greeting Extension - Deployment - - - 3.0.0-M4 - - - - - org.acme - quarkus-greeting - ${project.version} - - - io.quarkus - quarkus-undertow-deployment - - - io.quarkus - quarkus-junit5-internal - test - - - io.rest-assured - rest-assured - test - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - - - io.quarkus - quarkus-extension-processor - ${quarkus.version} - - - - - - maven-surefire-plugin - ${maven.surefire.version} - - - org.jboss.logmanager.LogManager - ${maven.home} - - - - - - - - + ... 
+ + io.rest-assured + rest-assured + test + ---- -<1> Quarkus proposes facilities to test extensions via the `quarkus-junit5-internal` artifact, in particular the `io.quarkus.test.QuarkusUnitTest` runner which starts an application with your extension. -<2> We will use http://rest-assured.io[RestAssured] (massively used in Quarkus) to test our HTTP endpoint. -<3> In order to not fallback to JUnit 4 legacy mode you need to define a recent version of `maven-surefire` plugin. +The `create-extension` Maven Mojo can create the test and integration-test structure (drop the `-DwithoutTests`). Here, we'll create it ourselves: -Currently, the `create-extension` Maven Mojo does not create the test structure. We'll create it ourselves: - -[source, shell] +[source,bash] ---- -mkdir -p ./quarkus-greeting/deployment/src/test/java/org/acme/quarkus/greeting/deployment +mkdir -p ./greeting-extension/deployment/src/test/java/org/acme/greeting/extension/deployment ---- -To start testing your extension, create the following `org.acme.quarkus.greeting.deployment.GreetingTest` test class: +To start testing your extension, create the following `org.acme.greeting.extension.deployment.GreetingExtensionTest` test class: [source, java] ---- -package org.acme.quarkus.greeting.deployment; +package org.acme.greeting.extension.deployment; import io.quarkus.test.QuarkusUnitTest; import io.restassured.RestAssured; @@ -607,7 +580,7 @@ import org.junit.jupiter.api.extension.RegisterExtension; import static org.hamcrest.Matchers.containsString; -public class GreetingTest { +public class GreetingExtensionTest { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() @@ -624,11 +597,11 @@ public class GreetingTest { <1> We register a Junit Extension which will start a Quarkus application with the Greeting extension. <2> We verify the application has a `greeting` endpoint responding to a HTTP GET request with a OK status (200) and a plain text body containing `Hello` -Time to test! +Time to test and install to our local maven repository! -[source, shell] +[source,shell,subs=attributes+] ---- -$ mvn clean test +$ mvn clean install [INFO] Scanning for projects... [INFO] ------------------------------------------------------------------------ [INFO] Reactor Build Order: @@ -637,35 +610,41 @@ $ mvn clean test [INFO] Greeting Extension - Runtime [jar] [INFO] Greeting Extension - Deployment [jar] [INFO] +[INFO] -----------------< org.acme:greeting-extension-parent >----------------- +[INFO] Building Greeting Extension - Parent 1.0.0-SNAPSHOT [1/3] +[INFO] --------------------------------[ pom ]--------------------------------- ... -[INFO] --- maven-surefire-plugin:3.0.0-M4:test (default-test) @ quarkus-greeting-deployment --- -[INFO] [INFO] ------------------------------------------------------- [INFO] T E S T S [INFO] ------------------------------------------------------- -[INFO] Running org.acme.quarkus.greeting.deployment.GreetingTest -2020-04-23 13:55:44,612 INFO [io.quarkus] (main) Quarkus {quarkus-version} started in 0.395s. Listening on: http://0.0.0.0:8081 -2020-04-23 13:55:44,614 INFO [io.quarkus] (main) Profile test activated. 
-2020-04-23 13:55:44,614 INFO [io.quarkus] (main) Installed features: [cdi, quarkus-greeting, servlet] -2020-04-23 13:55:45,876 INFO [io.quarkus] (main) Quarkus stopped in 0.025s -[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.609 s - in org.acme.quarkus.greeting.deployment.GreetingTest -[INFO] +[INFO] Running org.acme.greeting.extension.deployment.GreetingExtensionTest +2021-01-27 10:24:42,506 INFO [io.quarkus] (main) Quarkus {quarkus-version} on JVM started in 0.470s. Listening on: http://localhost:8081 +2021-01-27 10:24:42,508 INFO [io.quarkus] (main) Profile test activated. +2021-01-27 10:24:42,508 INFO [io.quarkus] (main) Installed features: [cdi, greeting-extension, servlet] +2021-01-27 10:24:43,764 INFO [io.quarkus] (main) Quarkus stopped in 0.018s +[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.799 s - in org.acme.greeting.extension.deployment.GreetingExtensionTest +[INFO] [INFO] Results: -[INFO] +[INFO] [INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0 [INFO] +[INFO] +[INFO] --- maven-jar-plugin:2.4:jar (default-jar) @ greeting-extension-deployment --- +[INFO] Building jar: /Users/ia3andy/workspace/redhat/quarkus/demo/greeting-extension/deployment/target/greeting-extension-deployment-1.0.0-SNAPSHOT.jar +[INFO] +[INFO] --- maven-install-plugin:2.4:install (default-install) @ greeting-extension-deployment --- +... [INFO] ------------------------------------------------------------------------ -[INFO] Reactor Summary for getting-started-extension 1.0-SNAPSHOT: +[INFO] Reactor Summary for Greeting Extension - Parent 1.0.0-SNAPSHOT: [INFO] -[INFO] getting-started-extension .......................... SUCCESS [ 0.076 s] -[INFO] Greeting Extension - Parent ........................ SUCCESS [ 0.002 s] -[INFO] Greeting Extension - Runtime ....................... SUCCESS [ 1.467 s] -[INFO] Greeting Extension - Deployment .................... SUCCESS [ 4.099 s] +[INFO] Greeting Extension - Parent ........................ SUCCESS [ 0.303 s] +[INFO] Greeting Extension - Runtime ....................... SUCCESS [ 3.345 s] +[INFO] Greeting Extension - Deployment .................... SUCCESS [ 7.365 s] [INFO] ------------------------------------------------------------------------ [INFO] BUILD SUCCESS [INFO] ------------------------------------------------------------------------ -[INFO] Total time: 5.745 s -[INFO] Finished at: 2020-01-28T22:40:56+01:00 +[INFO] Total time: 11.246 s +[INFO] Finished at: 2021-01-27T10:24:44+01:00 [INFO] ------------------------------------------------------------------------ ---- @@ -680,14 +659,14 @@ Edsger W. Dijkstra ==== Debugging your application build Since your extension deployment is made during the application build, this process is triggered by your build tool. -That means if your want to debug this phase you need to launch your build tool with the remote debug mode switched one. +That means if you want to debug this phase you need to launch your build tool with the remote debug mode switched one. ===== Maven You can activate Maven remote debugging by using `mvnDebug`. You can launch your application with the following command line: -[source, shell] +[source,bash] ---- mvnDebug clean compile quarkus:dev ---- @@ -700,12 +679,12 @@ Now, you can run your IDE `Remote` configuration to attach it to `localhost:8000 You can activate Gradle remote debugging by using the flags `org.gradle.debug=true` or `org.gradle.daemon.debug=true` in daemon mode. 
You can launch your application with the following command line: -[source, shell] +[source,bash] ---- ./gradlew quarkusDev -Dorg.gradle.daemon.debug=true ---- -By default, Maven will wait for a connection on `localhost:5005`. +By default, Gradle will wait for a connection on `localhost:5005`. Now, you can run your IDE `Remote` configuration to attach it to `localhost:5005`. @@ -714,82 +693,73 @@ Now, you can run your IDE `Remote` configuration to attach it to `localhost:5005 We have seen together how to test your extension and sometimes things don't go so well and you want to debug your tests. Same principle here, the trick is to enable the Maven Surefire remote debugging in order to attach an IDE `Remote` configuration. -[source, shell] +[source,shell] ---- -$ cd ./greeting -$ mvn clean test -Dmaven.surefire.debug +cd ./greeting-extension +mvn clean test -Dmaven.surefire.debug ---- By default, Maven will wait for a connection on `localhost:5005`. -=== Extension publication +=== Time to use your new extension -Now that you just finish to build your first extension you should be eager to use it in a Quarkus application! +Now that you just finished building your first extension you should be eager to use it in a Quarkus application! *Classic Maven publication* -Because your extension produces traditional JARs, the easiest way to share your extension is to publish it to a Maven repository. -Once published you can simply declare it with your project dependencies. Let's demonstrate that by creating a simple Quarkus application +If not already done in the previous step, you should install the `greeting-extension` in your local repository: +[source,shell] +---- +cd ./greeting-extension +mvn clean install +---- -[source, schell] +Then from another directory, use our tooling to create a new `greeting-app` Quarkus application with your new extension: +[source,bash, subs=attributes+] ---- -$mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=greeting-app \ - -DprojectVersion=1.0-SNAPSHOT \ - -DclassName=HelloResource + -Dextensions="org.acme:greeting-extension:1.0.0-SNAPSHOT" \ + -DnoExamples ---- -`cd` into `greeting-app` and add the dependency on `quarkus-greeting` extension we created above. +`cd` into `greeting-app`. -NOTE: `quarkus-greeting` extension has to be installed in the local Maven repository to be usable in the application. +NOTE: The `greeting-extension` extension has to be installed in the local Maven repository to be usable in the application. -[source, xml] ----- - - - org.acme - quarkus-greeting - 1.0-SNAPSHOT - - - io.quarkus - quarkus-resteasy - - ----- -Run the application and notice the `Install Features` list contains the `quarkus-greeting` extension. +Run the application and notice the `Installed Features` list contains the `greeting-extension` extension. -[source, shell] +[source,shell,subs=attributes+] ---- $ mvn clean compile quarkus:dev [INFO] Scanning for projects... 
[INFO] -[INFO] ---------------------< org.acme:code-with-quarkus >--------------------- -[INFO] Building code-with-quarkus 1.0.0-SNAPSHOT +[INFO] -----------------------< org.acme:greeting-app >------------------------ +[INFO] Building greeting-app 1.0.0-SNAPSHOT [INFO] --------------------------------[ jar ]--------------------------------- [INFO] -[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ code-with-quarkus --- -[INFO] Deleting /tmp/code-with-quarkus/target +[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ greeting-app --- +[INFO] +[INFO] --- quarkus-maven-plugin:{quarkus-version}:generate-code (default) @ greeting-app --- [INFO] -[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ code-with-quarkus --- +[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ greeting-app --- [INFO] Using 'UTF-8' encoding to copy filtered resources. -[INFO] Copying 2 resources +[INFO] Copying 1 resource [INFO] -[INFO] --- maven-compiler-plugin:3.8.1:compile (default-compile) @ code-with-quarkus --- -[INFO] Changes detected - recompiling the module! -[INFO] Compiling 1 source file to /tmp/code-with-quarkus/target/classes +[INFO] --- maven-compiler-plugin:3.8.1:compile (default-compile) @ greeting-app --- +[INFO] Nothing to compile - all classes are up to date [INFO] -[INFO] --- quarkus-maven-plugin:{quarkus-version}:dev (default-cli) @ code-with-quarkus --- +[INFO] --- quarkus-maven-plugin:{quarkus-version}:dev (default-cli) @ greeting-app --- Listening for transport dt_socket at address: 5005 -__ ____ __ _____ ___ __ ____ ______ - --/ __ \/ / / / _ | / _ \/ //_/ / / / __/ - -/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \ ---\___\_\____/_/ |_/_/|_/_/|_|\____/___/ -2020-04-23 14:17:36,137 INFO [io.quarkus] (Quarkus Main Thread) greeting-app 1.0-SNAPSHOT (powered by Quarkus {quarkus-version}) started in 0.985s. Listening on: http://0.0.0.0:8080 -2020-04-23 14:17:36,140 INFO [io.quarkus] (Quarkus Main Thread) Profile dev activated. Live Coding activated. -2020-04-23 14:17:36,140 INFO [io.quarkus] (Quarkus Main Thread) Installed features: [cdi, quarkus-greeting, resteasy, servlet] +__ ____ __ _____ ___ __ ____ ______ + --/ __ \/ / / / _ | / _ \/ //_/ / / / __/ + -/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \ +--\___\_\____/_/ |_/_/|_/_/|_|\____/___/ +2021-01-27 10:28:07,240 INFO [io.quarkus] (Quarkus Main Thread) greeting-app 1.0.0-SNAPSHOT on JVM (powered by Quarkus {quarkus-version}) started in 0.531s. Listening on: http://localhost:8080 +2021-01-27 10:28:07,242 INFO [io.quarkus] (Quarkus Main Thread) Profile dev activated. Live Coding activated. +2021-01-27 10:28:07,243 INFO [io.quarkus] (Quarkus Main Thread) Installed features: [cdi, greeting-extension, servlet] ---- From an extension developer standpoint the Maven publication strategy is very handy and fast but Quarkus wants to go one step further by also ensuring a reliability of the ecosystem for the people who will use the extensions. @@ -811,12 +781,30 @@ If the extension solves a general problem, it is very handy for Quarkus users to But this comes with some responsibility for you, keeping it up to date with Quarkus minor releases (every month or so at the moment). When in doubt, have a conversation with the community in the https://groups.google.com/forum/#!forum/quarkus-dev[Quarkus Google Group]. We can make a collective decision. -[NOTE] -As for now, the process to propose a new extension is not defined yet. 
-Your best chance is to present your extension on the https://groups.google.com/forum/#!forum/quarkus-dev[Quarkus Google Group] and wait for an official invitation to join the Quarkus Platform. +If you want to contribute a new extension, we strongly suggest you host it in the Quarkiverse Hub. + +*Quarkiverse Hub* + +link:https://github.com/quarkiverse[Quarkiverse Hub] is the GitHub organization that provides repository hosting (including build, CI and release publishing setup) for Quarkus extension projects contributed by the community. + +You can get started by creating an link:https://github.com/quarkusio/quarkus/issues/new/choose[Extension Request] issue (first check link:https://github.com/quarkusio/quarkus/labels/kind%2Fextension-proposal[here] whether one has already been submitted) and asking to lead it. + +We'll take care of provisioning a new repository and setting it up to: + +- Be supported by our tooling; +- Publish the documentation you produce for your extension to the Quarkiverse website; +- Configure your extension to use the link:https://github.com/quarkusio/quarkus-ecosystem-ci#quarkus-ecosystem-ci[Quarkus Ecosystem CI] to build against the latest Quarkus Core changes; +- Give you the freedom to manage the project and release to Maven Central as you like. + +For more information, check link:https://github.com/quarkiverse/quarkiverse/wiki[the Quarkiverse Wiki] and link:https://quarkus.io/blog/quarkiverse/[this blog post]. == Conclusion Creating new extensions may appear to be an intricate task at first but once you understood the Quarkus game-changer paradigm (build time vs runtime) the structure of an extension makes perfectly sense. As usual, along the path Quarkus simplifies things under the hood (Maven Mojo, bytecode generation or testing) to make it pleasant to develop new features. + +== Further reading + +- link:writing-extensions[Writing your own extension] for the full documentation. +- link:dev-console[Quarkus DEV Console] to learn how to support the DEV Console in your extension. diff --git a/docs/src/main/asciidoc/building-native-image.adoc b/docs/src/main/asciidoc/building-native-image.adoc index 76ea137e93a3a..9d550738ae7c5 100644 --- a/docs/src/main/asciidoc/building-native-image.adoc +++ b/docs/src/main/asciidoc/building-native-image.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Building a Native Executable @@ -11,18 +11,74 @@ This guide covers: * Compiling the application to a native executable * Packaging the native executable in a container +* Debugging native executable This guide takes as input the application developed in the link:getting-started[Getting Started Guide]. -== Prerequisites + +== GraalVM + +Building a native executable requires using a distribution of GraalVM. +There are three distributions: +Oracle GraalVM Community Edition (CE), Oracle GraalVM Enterprise Edition (EE) and Mandrel. +The differences between the Oracle and Mandrel distributions are as follows: + +* Mandrel is a downstream distribution of the Oracle GraalVM CE. +Mandrel's main goal is to provide a way to build native executables specifically designed to support Quarkus.
+ +* Mandrel releases are built from a code base derived from the upstream Oracle GraalVM CE code base, +with only minor changes but some significant exclusions that are not necessary for Quarkus native apps. +They support the same capabilities to build native executables as Oracle GraalVM CE, +with no significant changes to functionality. +Notably, they do not include support for polyglot programming. +The reason for these exclusions is to provide a better level of support for the majority of Quarkus users. +These exclusions also mean Mandrel offers a considerable reduction in its distribution size +when compared with Oracle GraalVM CE/EE. + +* Mandrel is built slightly differently to Oracle GraalVM CE, using the standard OpenJDK project. +This means that it does not profit from a few small enhancements that Oracle have added to the version of OpenJDK used to build their own GraalVM downloads. +These enhancements are omitted because upstream OpenJDK does not manage them, and cannot vouch for them. +This is particularly important when it comes to conformance and security. + +* Mandrel is currently only recommended for building native executables that target Linux containerized environments. +This means that Mandrel users should use containers to build their native executables. +If you are building native executables for macOS or Windows target platforms, +you should consider using Oracle GraalVM instead, +because Mandrel does not currently target these platforms. +Building native executables directly on bare metal Linux is possible, +with details available in the https://github.com/graalvm/mandrel/blob/default/README.md[Mandrel README] +and https://github.com/graalvm/mandrel/releases[Mandrel releases]. + +The prerequisites vary slightly depending on whether you are using Oracle GraalVM CE/EE or Mandrel. + +[IMPORTANT] +.Install the Java 11 version of GraalVM +==== +While Oracle GraalVM is available for both Java 8 and Java 11 (Mandrel only supports Java 11), Quarkus only works with the Java 11 version. +If you use the Oracle distribution, make sure to install the Java 11 version. +==== + +== Prerequisites for Mandrel + +To complete this guide using Mandrel, you need: + +* less than 15 minutes +* an IDE +* JDK 11 installed with `JAVA_HOME` configured appropriately +* A working container runtime (Docker, podman) +* The code of the application developed in the link:getting-started[Getting Started Guide]. + +Skip to <<#container-runtime,this section>> to continue with the guide for Mandrel, +and follow the Mandrel-specific instructions in that section. + +== Prerequisites for Oracle GraalVM CE/EE To complete this guide, you need: * less than 15 minutes * an IDE -* JDK 8 installed with `JAVA_HOME` configured appropriately +* JDK 11 installed with `JAVA_HOME` configured appropriately * A xref:configuring-c-development[working C development environment] -* GraalVM version {graalvm-version} installed and xref:configuring-graalvm[configured appropriately] +* GraalVM version {graalvm-version} (be sure to install the Java 11 support) installed and xref:configuring-graalvm[configured appropriately] * A working container runtime (Docker, podman) * The code of the application developed in the link:getting-started[Getting Started Guide]. @@ -34,7 +90,7 @@ What does having a working C developer environment mean? * On Linux, you will need GCC, and the glibc and zlib headers.
Examples for common distributions: + -[source,shell] +[source,bash] ---- # dnf (rpm-based) sudo dnf install gcc glibc-devel zlib-devel libstdc++-static @@ -43,7 +99,7 @@ sudo apt-get install build-essential libz-dev zlib1g-dev ---- * XCode provides the required dependencies on macOS: + -[source,shell] +[source,bash] ---- xcode-select --install ---- @@ -60,19 +116,20 @@ If you cannot install GraalVM, you can use a multi-stage Docker build to run Mav Version {graalvm-version} is required. Using the community edition is enough. -1. Install GraalVM if you haven't already. You have a few options for this: +1. Install GraalVM (pick the java 11 version) if you haven't already. You have a few options for this: ** Use platform-specific install tools like https://github.com/graalvm/homebrew-tap[homebrew], https://sdkman.io/jdks#Oracle[sdkman], or https://github.com/ScoopInstaller/Java[scoop]. ** Download the appropriate Community Edition archive from , and unpack it like you would any other JDK. +Make sure to download and install at Java 11 version. 2. Configure the runtime environment. Set `GRAALVM_HOME` environment variable to the GraalVM installation directory, for example: + -[source,shell] +[source,bash] ---- export GRAALVM_HOME=$HOME/Development/graalvm/ ---- + On macOS, point the variable to the `Home` sub-directory: + -[source,shell] +[source,bash] ---- export GRAALVM_HOME=$HOME/Development/graalvm/Contents/Home/ ---- @@ -85,7 +142,7 @@ Installing via scoop will do this for you. ==== 3. Install the `native-image` tool using `gu install`: + -[source,shell] +[source,bash] ---- ${GRAALVM_HOME}/bin/gu install native-image ---- @@ -93,13 +150,13 @@ ${GRAALVM_HOME}/bin/gu install native-image Some previous releases of GraalVM included the `native-image` tool by default. This is no longer the case; it must be installed as a second step after GraalVM itself is installed. Note: there is an outstanding issue xref:graal-and-catalina[using GraalVM with macOS Catalina]. 4. (Optional) Set the `JAVA_HOME` environment variable to the GraalVM installation directory. + -[source,shell] +[source,bash] ---- export JAVA_HOME=${GRAALVM_HOME} ---- 5. (Optional) Add the GraalVM `bin` directory to the path + -[source,shell] +[source,bash] ---- export PATH=${GRAALVM_HOME}/bin:$PATH ---- @@ -110,14 +167,14 @@ export PATH=${GRAALVM_HOME}/bin:$PATH ==== GraalVM binaries are not (yet) notarized for macOS Catalina as reported in this https://github.com/oracle/graal/issues/1724[GraalVM issue]. This means that you may see the following error when using `gu`: -[source,shell] +[source,bash] ---- “gu” cannot be opened because the developer cannot be verified ---- Use the following command to recursively delete the `com.apple.quarantine` extended attribute on the GraalVM install directory as a workaround: -[source,shell] +[source,bash] ----- xattr -r -d com.apple.quarantine ${GRAALVM_HOME}/../.. ----- @@ -177,14 +234,14 @@ the `x64 Native Tools Command Prompt` that was installed with the Visual Studio Another solution is to write a script to do this for you: -[source,shell] +[source,bash] ---- cmd /c 'call "C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Auxiliary\Build\vcvars64.bat" && mvn package -Pnative' ---- ==== -In addition to the regular files, the build also produces `target/getting-started-1.0-SNAPSHOT-runner`. -You can run it using: `./target/getting-started-1.0-SNAPSHOT-runner`. +In addition to the regular files, the build also produces `target/getting-started-1.0.0-SNAPSHOT-runner`. 
+You can run it using: `./target/getting-started-1.0.0-SNAPSHOT-runner`. == Testing the native executable @@ -241,16 +298,16 @@ The executable is retrieved using the `native.image.path` system property config To see the `NativeGreetingResourceIT` run against the native executable, use `./mvnw verify -Pnative`: [source,shell] ---- -./mvnw verify -Pnative +$ ./mvnw verify -Pnative ... -[getting-started-1.0-SNAPSHOT-runner:18820] universe: 587.26 ms -[getting-started-1.0-SNAPSHOT-runner:18820] (parse): 2,247.59 ms -[getting-started-1.0-SNAPSHOT-runner:18820] (inline): 1,985.70 ms -[getting-started-1.0-SNAPSHOT-runner:18820] (compile): 14,922.77 ms -[getting-started-1.0-SNAPSHOT-runner:18820] compile: 20,361.28 ms -[getting-started-1.0-SNAPSHOT-runner:18820] image: 2,228.30 ms -[getting-started-1.0-SNAPSHOT-runner:18820] write: 364.35 ms -[getting-started-1.0-SNAPSHOT-runner:18820] [total]: 52,777.76 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] universe: 587.26 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] (parse): 2,247.59 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] (inline): 1,985.70 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] (compile): 14,922.77 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] compile: 20,361.28 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] image: 2,228.30 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] write: 364.35 ms +[getting-started-1.0.0-SNAPSHOT-runner:18820] [total]: 52,777.76 ms [INFO] [INFO] --- maven-failsafe-plugin:2.22.1:integration-test (default) @ getting-started --- [INFO] @@ -258,7 +315,7 @@ To see the `NativeGreetingResourceIT` run against the native executable, use `./ [INFO] T E S T S [INFO] ------------------------------------------------------- [INFO] Running org.acme.quickstart.NativeGreetingResourceIT -Executing [/data/home/gsmet/git/quarkus-quickstarts/getting-started/target/getting-started-1.0-SNAPSHOT-runner, -Dquarkus.http.port=8081, -Dtest.url=http://localhost:8081, -Dquarkus.log.file.path=build/quarkus.log] +Executing [/data/home/gsmet/git/quarkus-quickstarts/getting-started/target/getting-started-1.0.0-SNAPSHOT-runner, -Dquarkus.http.port=8081, -Dtest.url=http://localhost:8081, -Dquarkus.log.file.path=build/quarkus.log] 2019-04-15 11:33:20,348 INFO [io.quarkus] (main) Quarkus 999-SNAPSHOT started in 0.002s. Listening on: http://[::]:8081 2019-04-15 11:33:20,348 INFO [io.quarkus] (main) Installed features: [cdi, resteasy] [INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.387 s - in org.acme.quickstart.NativeGreetingResourceIT @@ -272,6 +329,12 @@ duration can be changed using the `quarkus.test.native-image-wait-time` system p to 300 seconds, use: `./mvnw verify -Pnative -Dquarkus.test.native-image-wait-time=300`. ==== +[WARNING] +==== +In the future, `@NativeImageTest` will be deprecated in favor of `@QuarkusIntegrationTest` which provides a superset of the testing +capabilities of `@NativeImageTest`. More information about `@QuarkusIntegrationTest` can be found in the link:getting-started-testing#quarkus-integration-test[Testing Guide]. +==== + By default, native tests runs using the `prod` profile. This can be overridden using the `quarkus.test.native-image-profile` property. For example, in your `application.properties` file, add: `quarkus.test.native-image-profile=test`. @@ -311,26 +374,53 @@ to install as little software as possible). To this end, Quarkus provides a very convenient way of creating a native Linux executable by leveraging a container runtime such as Docker or podman. 
The easiest way of accomplishing this task is to execute: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative -Dquarkus.native.container-build=true ---- [TIP] ==== -You can also select the container runtime to use with: -[source,shell] +By default Quarkus automatically detects the container runtime. +If you want to explicitely select the container runtime, you can do it with: +[source,bash] ---- # Docker -./mvnw package -Pnative -Dquarkus.native.container-runtime=docker +./mvnw package -Pnative -Dquarkus.native.container-build=true -Dquarkus.native.container-runtime=docker # Podman -./mvnw package -Pnative -Dquarkus.native.container-runtime=podman +./mvnw package -Pnative -Dquarkus.native.container-build=true -Dquarkus.native.container-runtime=podman ---- - These are normal Quarkus config properties, so if you always want to build in a container it is recommended you add these to your `application.properties` in order to avoid specifying them every time. +==== + +[TIP] +==== +If you see the following invalid path error for your application JAR when trying to create a native executable using a container build, even though your JAR was built successfully, you're most likely using a remote daemon for your container runtime. +---- +Error: Invalid Path entry getting-started-1.0.0-SNAPSHOT-runner.jar +Caused by: java.nio.file.NoSuchFileException: /project/getting-started-1.0.0-SNAPSHOT-runner.jar +---- +In this case, use the parameter `-Dquarkus.native.remote-container-build=true` instead of `-Dquarkus.native.container-build=true`. +The reason for this is that the local build driver invoked through `-Dquarkus.native.container-build=true` uses volume mounts to make the JAR available in the build container, but volume mounts do not work with remote daemons. The remote container build driver copies the necessary files instead of mounting them. Note that even though the remote driver also works with local daemons, the local driver should be preferred in the local case because mounting is usually more performant than copying. +==== + +[TIP] +==== +Building with Mandrel requires a custom builder image parameter to be passed additionally: +[source,bash,subs=attributes+] +---- +./mvnw package -Pnative -Dquarkus.native.container-build=true -Dquarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-mandrel:{mandrel-flavor} +---- + +Please note that the above command points to a floating tag. +It is highly recommended to use the floating tag, +so that your builder image remains up-to-date and secure. +If you absolutely must, you may hard-code to a specific tag +(see https://quay.io/repository/quarkus/ubi-quarkus-mandrel?tab=tags[here] for available tags), +but be aware that you won't get security updates that way and it's unsupported. ==== == Creating a container @@ -341,7 +431,7 @@ By far the easiest way to create a container-image from your Quarkus application If one of those extensions is present, then creating a container image for the native executable is essentially a matter of executing a single command: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative -Dquarkus.native.container-build=true -Dquarkus.container-image.build=true ---- @@ -392,14 +482,14 @@ You can read more about UBI on: Then, if you didn't delete the generated native executable, you can build the docker image with: -[source,shell] +[source,bash] ---- docker build -f src/main/docker/Dockerfile.native -t quarkus-quickstart/getting-started . 
---- And finally, run it with: -[source,shell] +[source,bash] ---- docker run -i --rm -p 8080:8080 quarkus-quickstart/getting-started ---- @@ -410,10 +500,11 @@ NOTE: If you are interested in tiny Docker images, check the {quarkus-images-url The previous section showed you how to build a native executable using Maven, but implicitly required that the proper GraalVM version be installed on the building machine (be it your local machine or your CI/CD infrastructure). -In cases where the GraalVM requirement cannot be met, you can use Docker to perform the Maven build by using a multi-stage Docker build. A multi-stage Docker build is like two Dockerfile files combined in one, the first is used to build the artifact used by the second. +In cases where the GraalVM requirement cannot be met, you can use Docker to perform the Maven or Gradle build by using a multi-stage Docker build. A multi-stage Docker build is like two Dockerfile files combined in one, the first is used to build the artifact used by the second. -In this guide we will use the first stage to generate the native executable using Maven and the second stage to create our runtime image. +In this guide we will use the first stage to generate the native executable and the second stage to create our runtime image. +Sample Dockerfile for building with Maven: [source,dockerfile,subs=attributes+] ---- ## Stage 1 : build with maven builder image with native capabilities @@ -443,22 +534,46 @@ USER 1001 CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] ---- - Save this file in `src/main/docker/Dockerfile.multistage` as it is not included in the getting started quickstart. +Sample Dockerfile for building with Gradle: +[source,dockerfile,subs=attributes+] +---- +## Stage 1 : build with maven builder image with native capabilities +FROM quay.io/quarkus/centos-quarkus-maven:{graalvm-version}-java8 AS build +COPY src /usr/src/app/src +COPY build.gradle /usr/src/app +COPY settings.gradle /usr/src/app +COPY gradle.properties /usr/src/app +USER root +RUN chown -R quarkus /usr/src/app +USER quarkus +RUN gradle -b /usr/src/app/build.gradle clean buildNative + +## Stage 2 : create the docker final image +FROM registry.access.redhat.com/ubi8/ubi-minimal +WORKDIR /work/ +COPY --from=build /usr/src/app/build/*-runner /work/application +RUN chmod 775 /work +EXPOSE 8080 +CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] +---- + +If you are using Gradle in your project, you can use this sample Dockerfile. Save it in `src/main/docker/Dockerfile.multistage`. + [WARNING] ==== Before launching our Docker build, we need to update the default `.dockerignore` file as it filters everything except the `target` directory and as we plan to build inside a container we need to be able to copy the `src` directory. So edit your `.dockerignore` and remove or comment its content. ==== -[source,shell] +[source,bash] ---- docker build -f src/main/docker/Dockerfile.multistage -t quarkus-quickstart/getting-started . ---- And finally, run it with: -[source,shell] +[source,bash] ---- docker run -i --rm -p 8080:8080 quarkus-quickstart/getting-started ---- @@ -470,6 +585,76 @@ If you need SSL support in your native executable, you can easily include the ne Please see link:native-and-ssl#working-with-containers[our Using SSL With Native Executables guide] for more information. 
==== +== Debugging native executable + +Starting with Oracle GraalVM 20.2 or Mandrel 20.1, +debug symbols for native executables can be generated for Linux environments +(Windows support is still under development). +These symbols can be used to debug native executables with tools such as `gdb`. + +To generate debug symbols, +add `-Dquarkus.native.debug.enabled=true` flag when generating the native executable. +You will find the debug symbols for the native executable in a `.debug` file next to the native executable. + +[NOTE] +==== +The generation of the `.debug` file depends on `objcopy`. +On common Linux distributions and macOS you will need to install the `binutils` package: + +[source,bash] +---- +# dnf (rpm-based) +sudo dnf install binutils +# Debian-based distributions +sudo apt-get install binutils +# macOS +brew install binutils +export PATH=/usr/local/opt/binutils/bin:$PATH +---- + +When `objcopy` is not available debug symbols are embedded in the executable. +==== + +Aside from debug symbols, +setting `-Dquarkus.native.debug.enabled=true` flag generates a cache of source files +for any JDK runtime classes, GraalVM classes and application classes resolved during native executable generation. +This source cache is useful for native debugging tools, +to establish the link between the symbols and matching source code. +It provides a convenient way of making just the necessary sources available to the debugger/IDE when debugging a native executable. + +Sources for third party jar dependencies, including Quarkus source code, +are not added to the source cache by default. +To include those, make sure you invoke `mvn dependency:sources` first. +This step is required in order to pull the sources for these dependencies, +and get them included in the source cache. + +The source cache is located in the `target/sources` folder. + +[TIP] +==== +If running `gdb` from a different directory than `target`, then the sources can be loaded by running: + +[source,bash] +---- +directory path/to/target +---- + +in the `gdb` prompt. + +Or start `gdb` with: + +[source,bash] +---- +gdb -ex 'directory path/to/target' path/to/target/{project.name}-{project.version}-runner +---- + +e.g., +[source,bash] +---- +gdb -ex 'directory ./target' ./target/getting-started-1.0.0-SNAPSHOT-runner +---- +==== + [[configuration-reference]] == Configuring the Native Executable diff --git a/docs/src/main/asciidoc/building-substrate-howto.adoc b/docs/src/main/asciidoc/building-substrate-howto.adoc index b5d0d0d678bc5..e5466d9f4c54d 100644 --- a/docs/src/main/asciidoc/building-substrate-howto.adoc +++ b/docs/src/main/asciidoc/building-substrate-howto.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Building a Custom SubstrateVM @@ -22,7 +22,7 @@ auth param. After obtaining the download install it and set `JAVA_HOME` -[source,shell] +[source,bash] ---- wget -O jvmci.tgz http://download.oracle.com/otn/utilities_drivers/oracle-labs/labsjdk-8u202-jvmci-0.56-darwin-amd64.tar.gz?AuthParam=[GENERATED AUTH TOKEN HERE] tar -xzvf jvmci.tgz -C /opt @@ -39,7 +39,7 @@ the minimum requirements. Now you need to install Graal’s special build tool, `mx`. 
-[source,shell] +[source,bash] ---- git clone https://github.com/graalvm/mx.git export PATH=`pwd`/mx:$PATH @@ -49,7 +49,7 @@ export PATH=`pwd`/mx:$PATH You can now check-out and build Substrate: -[source,shell] +[source,bash] ---- git clone https://github.com/oracle/graal.git cd graal/substratevm @@ -58,7 +58,7 @@ mx build Once built, you can quickly run the tools in place -[source,shell] +[source,bash] ---- echo "public class HelloWorld { public static void main(String[] args) { System.out.println(\"Hello World\"); } }" > HelloWorld.java javac HelloWorld.java @@ -75,7 +75,7 @@ build options to pick the modules in your distribution. For a simple subset VM with just substrate and its native tools execute the following: -[source,shell] +[source,bash] ---- cd ../vm mx --dy /substratevm,/tools build @@ -84,7 +84,7 @@ the following: For a stock CE build the following will do: -[source,shell] +[source,bash] ---- mx --dy /substratevm,/tools,/sulong,/graal-nodejs build tar -czvf my-custom-graal.tgz -C latest_graalvm . diff --git a/docs/src/main/asciidoc/cache.adoc b/docs/src/main/asciidoc/cache.adoc index 3e0a34619b97c..9361e964e27fa 100644 --- a/docs/src/main/asciidoc/cache.adoc +++ b/docs/src/main/asciidoc/cache.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Application Data Caching :extension-status: preview @@ -36,23 +36,23 @@ However, you can go right to the completed example. Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. -The solution is located in the `cache-quickstart` directory. +The solution is located in the `cache-quickstart` {quickstarts-tree-url}/cache-quickstart[directory]. == Creating the Maven project First, we need to create a new Quarkus project using Maven with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=cache-quickstart \ -DclassName="org.acme.cache.WeatherForecastResource" \ -Dpath="/weather" \ - -Dextensions="cache,resteasy-jsonb" + -Dextensions="resteasy,cache,resteasy-jackson" ---- -This command generates the Maven project with a REST endpoint and imports the `cache` and `resteasy-jsonb` extensions. +This command generates the Maven project with a REST endpoint and imports the `cache` and `resteasy-jackson` extensions. If you already have your Quarkus project configured, you can add the `cache` extension to your project by running the following command in your project base directory: @@ -158,7 +158,6 @@ import java.util.List; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import org.jboss.resteasy.annotations.jaxrs.QueryParam; @@ -170,7 +169,6 @@ public class WeatherForecastResource { WeatherForecastService service; @GET - @Produces(MediaType.APPLICATION_JSON) public WeatherForecast getForecast(@QueryParam String city, @QueryParam long daysInFuture) { <1> long executionStart = System.currentTimeMillis(); List dailyForecasts = Arrays.asList( @@ -193,7 +191,7 @@ First, run the application using `./mvnw compile quarkus:dev` from the project d Then, call `http://localhost:8080/weather?city=Raleigh` from a browser. 
After six long seconds, the application will answer something like this: -[source] +[source,json] ---- {"dailyForecasts":["MONDAY will be cloudy in Raleigh","TUESDAY will be chilly in Raleigh","WEDNESDAY will be rainy in Raleigh"],"executionTimeInMs":6001} ---- @@ -276,6 +274,12 @@ The following sections will show you everything there is to know about it. Quarkus offers a set of annotations that can be used in a CDI managed bean to enable caching abilities. +[WARNING] +==== +Caching annotations are not allowed on private methods. +They will work fine with any other access modifier including package-private (no explicit modifier). +==== + === @CacheResult Loads a method result from the cache without executing the method body whenever possible. @@ -283,9 +287,9 @@ Loads a method result from the cache without executing the method body whenever When a method annotated with `@CacheResult` is invoked, Quarkus will compute a cache key and use it to check in the cache whether the method has been already invoked. If the method has one or more arguments, the key computation is done from all the method arguments if none of them is annotated with `@CacheKey`, or all the arguments annotated with `@CacheKey` otherwise. Each non-primitive method argument that is part of the key must implement `equals()` and `hashCode()` correctly for the cache to work as expected. -This annotation can also be used on a method with no arguments, a default key derived from the cache name is generated in that case. +This annotation can also be used on a method with no arguments, a default key derived from the cache name is used in that case. If a value is found in the cache, it is returned and the annotated method is never actually executed. -If no value is found, the annotated method is invoked and the returned value is stored in the cache using the computed or generated key. +If no value is found, the annotated method is invoked and the returned value is stored in the cache using the computed key. A method annotated with `CacheResult` is protected by a lock on cache miss mechanism. If several concurrent invocations try to retrieve a cache value from the same missing key, the method will only be invoked once. @@ -308,14 +312,9 @@ Removes an entry from the cache. When a method annotated with `@CacheInvalidate` is invoked, Quarkus will compute a cache key and use it to try to remove an existing entry from the cache. If the method has one or more arguments, the key computation is done from all the method arguments if none of them is annotated with `@CacheKey`, or all the arguments annotated with `@CacheKey` otherwise. -This annotation can also be used on a method with no arguments, a default key derived from the cache name is generated in that case. +This annotation can also be used on a method with no arguments, a default key derived from the cache name is used in that case. If the key does not identify any cache entry, nothing will happen. -[TIP] -==== -If the `@CacheResult` or `@CacheInvalidate` annotations are used on a method with no parameters, a unique default cache key derived from the cache name will be generated and used. -==== - === @CacheInvalidateAll When a method annotated with `@CacheInvalidateAll` is invoked, Quarkus will remove all entries from the cache. @@ -327,6 +326,49 @@ method annotated with `@CacheResult` or `@CacheInvalidate`. This annotation is optional and should only be used when some of the method arguments are NOT part of the cache key. 
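+For example, a minimal sketch could look like the following (the class, cache and parameter names are purely illustrative):
+
+[source,java]
+----
+package org.acme.cache;
+
+import javax.enterprise.context.ApplicationScoped;
+
+import io.quarkus.cache.CacheKey;
+import io.quarkus.cache.CacheResult;
+
+@ApplicationScoped
+public class ForecastService {
+
+    @CacheResult(cacheName = "forecast")
+    public String getForecast(@CacheKey String city, boolean logAccess) { <1>
+        // Expensive call here. This class and cache name are illustrative only.
+        return "Sunny in " + city;
+    }
+}
+----
+<1> Only `city` is used to compute the cache key; `logAccess` is ignored because it is not annotated with `@CacheKey`.
+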
+=== Composite cache key building logic + +When a cache key is built from several method arguments, whether they are explicitly identified with `@CacheKey` or not, the building logic depends on the order of these arguments in the method signature. On the other hand, the arguments names are not used at all and do not have any effect on the cache key. + +[source,java] +---- +package org.acme.cache; + +import javax.enterprise.context.ApplicationScoped; + +import io.quarkus.cache.CacheInvalidate; +import io.quarkus.cache.CacheResult; + +@ApplicationScoped +public class CachedService { + + @CacheResult(cacheName = "foo") + public Object load(String keyElement1, Integer keyElement2) { + // Call expensive service here. + } + + @CacheInvalidate(cacheName = "foo") + public void invalidate1(String keyElement2, Integer keyElement1) { <1> + } + + @CacheInvalidate(cacheName = "foo") + public void invalidate2(Integer keyElement2, String keyElement1) { <2> + } + + @CacheInvalidate(cacheName = "foo") + public void invalidate3(Object notPartOfTheKey, @CacheKey String keyElement1, @CacheKey Integer keyElement2) { <3> + } + + @CacheInvalidate(cacheName = "foo") + public void invalidate4(Object notPartOfTheKey, @CacheKey Integer keyElement2, @CacheKey String keyElement1) { <4> + } +} +---- +<1> Calling this method WILL invalidate values cached by the `load` method even if the key elements names have been swapped. +<2> Calling this method WILL NOT invalidate values cached by the `load` method because the key elements order is different. +<3> Calling this method WILL invalidate values cached by the `load` method because the key elements order is the same. +<4> Calling this method WILL NOT invalidate values cached by the `load` method because the key elements order is different. + == Configuring the underlying caching provider This extension uses https://github.com/ben-manes/caffeine[Caffeine] as its underlying caching provider. @@ -356,17 +398,6 @@ quarkus.cache.caffeine."bar".maximum-size=1000 <2> <1> The `foo` cache is being configured. <2> The `bar` cache is being configured. -== Context propagation - -This extension relies on non-blocking calls internally for cache values computations. -By default, there's no context propagation between the calling thread (from your application) and a thread that performs such a computation. - -The context propagation can be enabled for this extension by simply adding the `quarkus-smallrye-context-propagation` extension to your project. - -If you see a `javax.enterprise.context.ContextNotActiveException` in your application log during a cache computation, then you probably need to enable the context propagation. - -You can find more information about context propagation in Quarkus in the link:context-propagation[dedicated guide]. - == Annotated beans examples === Implicit simple cache key @@ -461,7 +492,7 @@ public class CachedService { } } ---- -<1> A unique default cache key derived from the cache name is generated and used. +<1> A unique default cache key derived from the cache name is used because the method has no arguments. === Multiple annotations on a single method @@ -496,7 +527,7 @@ public class CachedService { [#negative-cache] == Negative caching and nulls -Sometimes one wants to cache the results of an (expensive) remote call. +Sometimes one wants to cache the result of an (expensive) remote call. If the remote call fails, one may not want to cache the result or exception, but rather re-try the remote call on the next invocation. 
@@ -504,15 +535,15 @@ A simple approach could be to catch the exception and return `null`, so that the act accordingly: .Sample code -[souce,java] +[source,java] ---- public void caller(int val) { - Integer result = callRemote(val); //<1> - if (result == null) { + Integer result = callRemote(val); //<1> + if (result != null) { System.out.println("Result is " + result); else { - System.out.println("Got an exception"); + System.out.println("Got an exception"); } } @@ -520,10 +551,10 @@ act accordingly: private Integer callRemote(int val) { try { - Integer val = remoteWebServer.getResult(val); //<2> + Integer val = remoteWebServer.getResult(val); //<2> return val; } catch (Exception e) { - return null; // <3> + return null; // <3> } } ---- @@ -542,7 +573,7 @@ To prevent the cache from caching (marker) results from a remote call, we need t the exception bubble out of the called method and catch it at the caller side: .With Exception bubbling up -[souce,java] +[source,java] ---- public void caller(int val) { try { @@ -567,11 +598,4 @@ the exception bubble out of the called method and catch it at the caller side: When the call to the remote throws an exception, the cache does not store the result, so that a subsequent call to `callRemote()` with the same parameter value will not be answered out of the cache. -It will instead result in another -attempt to call the remote. - -The previous code example has the side-effect that the cache logs the thrown exception -with a long stack trace to the console. This is done to inform developers that something -exceptional has happened, but if you have a setup like above, you are already catching -the exception and know what you are doing. - +It will instead result in another attempt to call the remote. diff --git a/docs/src/main/asciidoc/camel.adoc b/docs/src/main/asciidoc/camel.adoc index 7c5111ca92536..6a9f60131a8c7 100644 --- a/docs/src/main/asciidoc/camel.adoc +++ b/docs/src/main/asciidoc/camel.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Apache Camel on Quarkus diff --git a/docs/src/main/asciidoc/cassandra.adoc b/docs/src/main/asciidoc/cassandra.adoc index 89dc0314cae2a..dddcc695b8aaa 100644 --- a/docs/src/main/asciidoc/cassandra.adoc +++ b/docs/src/main/asciidoc/cassandra.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using the Cassandra Client @@ -66,6 +66,11 @@ The most important part of the `pom.xml` is adding the `cassandra-quarkus` exten
    ---- +Also make sure to follow the +link:https://docs.datastax.com/en/developer/java-driver/latest/manual/mapper/config/[instructions] +on how to add an annotation processor to the compiler configuration. When the project is compiled, +additional mapper classes are generated. + == Creating JSON REST service In this example, we will create an application to manage a list of fruits. @@ -179,6 +184,9 @@ public class FruitDaoProducer { Note how the `QuarkusCqlSession` instance is injected automatically by the cassandra-quarkus extension in the `FruitDaoProducer` constructor. +Also note that `FruitMapperBuilder` is one of the classes generated automatically by the +`java-driver-mapper-processor` annotation processor. + Now create a `FruitService` that will be the business layer of our application and store/load the fruits from the Cassandra database. @@ -212,8 +220,6 @@ The last missing piece is the REST API that will expose GET and POST methods: [source,java] ---- @Path("/fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { private static final String STORE_NAME = "acme"; @@ -345,7 +351,7 @@ following CQL query: `SELECT data_center FROM system.local`. If you want to use Docker to run a Cassandra database, you can use the following command to launch one: -[source,shell] +[source,bash] ---- docker run \ --name local-cassandra-instance \ @@ -365,18 +371,18 @@ like JMX monitoring of the Cassandra instance. Next you need to create the keyspace and table that will be used by your application. If you are using Docker, run the following commands: -[source,shell] +[source,bash] ---- docker exec -it local-cassandra-instance cqlsh -e "CREATE KEYSPACE IF NOT EXISTS k1 WITH replication = {'class':'SimpleStrategy', 'replication_factor':1}" -docker exec -it local-cassandra-instance cqlsh -e "CREATE TABLE IF NOT EXISTS k1.fruit(id text, name text, description text, PRIMARY KEY((id), name))" +docker exec -it local-cassandra-instance cqlsh -e "CREATE TABLE IF NOT EXISTS k1.fruit(store_id text, name text, description text, PRIMARY KEY((store_id), name))" ---- If you're running Cassandra locally you can execute the cqlsh commands directly: -[source,shell] +[source,bash] ---- cqlsh -e "CREATE KEYSPACE IF NOT EXISTS k1 WITH replication = {'class':'SimpleStrategy', 'replication_factor':1} -cqlsh -e "CREATE TABLE IF NOT EXISTS k1.fruit(id text, name text, description text, PRIMARY KEY((id), name)) +cqlsh -e "CREATE TABLE IF NOT EXISTS k1.fruit(store_id text, name text, description text, PRIMARY KEY((store_id), name)) ---- == Creating a frontend @@ -400,7 +406,7 @@ When using `QuarkusCqlSession` you have access to reactive variant of methods th Quarkus and Mutiny. TIP: If you're not familiar with Mutiny, read the -link:https://quarkus.io/guides/getting-started-reactive[Getting Started with Reactive guide] first. +link:getting-started-reactive[Getting Started with Reactive guide] first. Let's rewrite the previous example using reactive programming with Mutiny. @@ -542,7 +548,7 @@ You can now interact with your reactive REST service: If you are using the `quarkus-smallrye-health` extension, `cassandra-quarkus` will automatically add a readiness health check to validate the connection to the cluster. -So when you access the `/health/ready` endpoint of your application you will have information about +So when you access the `/q/health/ready` endpoint of your application you will have information about the connection validation status. 
TIP: This behavior can be disabled by setting the `quarkus.cassandra.health.enabled` property to @@ -577,7 +583,7 @@ For the full list of available metrics, please refer to the link:https://docs.datastax.com/en/developer/java-driver/latest/manual/core/configuration/reference/[driver settings reference] and the `advanced.metrics` section. -When metrics are properly enabled and when you access the `/metrics` endpoint of your application, +When metrics are enabled and you access the `/q/metrics` endpoint of your application, you will see metric reports for all enabled metrics. == Building a native executable @@ -594,4 +600,4 @@ You can then point your browser to `http://localhost:8080/fruits.html` and use y Accessing a Cassandra database from a client application is easy with Quarkus and the Cassandra extension, which provides configuration and native support for the DataStax Java driver for -Apache Cassandra. \ No newline at end of file +Apache Cassandra. diff --git a/docs/src/main/asciidoc/cdi-integration.adoc b/docs/src/main/asciidoc/cdi-integration.adoc new file mode 100644 index 0000000000000..93e2440aa1a9f --- /dev/null +++ b/docs/src/main/asciidoc/cdi-integration.adoc @@ -0,0 +1,556 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - CDI Integration Guide + +include::./attributes.adoc[] +:numbered: +:toc: +:toclevels: 2 + +ArC, the CDI container, is bootstrapped at build time. +The downside of this approach is that CDI Portable Extensions cannot be supported. +Nevertheless, the functionality can be achieved using the Quarkus-specific extensions API. + +The container is bootstrapped in multiple phases. +From a high level perspective these phases go as follows: + +1. Initialization +2. Bean discovery +3. Registration of synthetic components +4. Validation + +In the _initialization_ phase the preparatory work is being carried out and custom contexts are registered. +_Bean discovery_ is then the process where the container analyzes all application classes, identifies beans and wires them all together based on the provided metadata. +Subsequently, the extensions can register _synthetic components_. +Attributes of these components are fully controlled by the extensions, i.e. are not derived from an existing class. +Finally, the _deployment is validated_. +For example, the container validates every injection point in the application and fails the build if there is no bean that satisfies the given required type and qualifiers. + +TIP: You can see more information about the bootstrap by enabling additional logging. Simply run the Maven build with `-X` or `--debug` and grep the lines that contain `io.quarkus.arc`. In the <>, you can use `quarkus.log.category."io.quarkus.arc.processor".level=DEBUG` and two special endpoints are also registered automatically to provide some basic debug info in the JSON format. + +Quarkus build steps can produce and consume various build items and hook into each phase. +In the following sections we will describe all the relevant build items and common scenarios. + +== Metadata Sources + +Classes and annotations are the primary source of bean-level metadata. +The initial metadata are read from the _bean archive index_, an immutable https://github.com/wildfly/jandex[Jandex index, window="_blank"] which is built from various sources during <>. 
+However, extensions can add, remove or transform the metadata at certain stages of the bootstrap. +Moreover, extensions can also register <>. +This is an important aspect to realize when integrating CDI components in Quarkus. + +This way, extensions can turn classes that would otherwise be ignored into beans, and vice versa. +For example, a class that declares a `@Scheduled` method is always registered as a bean even if it is not annotated with a bean defining annotation and would normally be ignored. + +:sectnums: +:sectnumlevels: 4 + +== Use Case - My Class Is Not Recognized as a Bean + +An `UnsatisfiedResolutionException` indicates a problem during <>. +Sometimes an injection point cannot be satisfied even if there is a class on the classpath that appears to be eligible for injection. +There are several reasons why a class is not recognized and also several ways to fix it. +In the first step we should identify the _reason_. + +[[additional_bean_build_item]] +=== _Reason 1_: Class Is Not Discovered + +Quarkus has a <>. +It might happen that the class is not part of the application index. +For example, classes from the _runtime module_ of a Quarkus extension are not indexed automatically. + +_Solution_: Use the `AdditionalBeanBuildItem`. +This build item can be used to specify one or more additional classes to be analyzed during the discovery. +Additional bean classes are transparently added to the application index processed by the container. + +.`AdditionalBeanBuildItem` Example +[source,java] +---- +@BuildStep +AdditionalBeanBuildItem additionalBeans() { + return new AdditionalBeanBuildItem(SmallRyeHealthReporter.class, HealthServlet.class); <1> +} +---- +<1> `AdditionalBeanBuildItem.Builder` can be used for more complex use cases. + +Bean classes added via `AdditionalBeanBuildItem` are _removable_ by default. +If the container considers them <>, they are just ignored. +However, you can use the `AdditionalBeanBuildItem.Builder.setUnremovable()` method to instruct the container to never remove bean classes registered via this build item. +See also <> and <> for more details. + +It is also possible to set the default scope via `AdditionalBeanBuildItem.Builder#setDefaultScope()`. +The default scope is only used if there is no scope declared on the bean class. + +NOTE: If no default scope is specified the `@Dependent` pseudo-scope is used. + +=== _Reason 2_: Class Is Discovered but Has No Bean Defining Annotation + +In Quarkus, the application is represented by a single bean archive with the https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#default_bean_discovery[bean discovery mode `annotated`, window="_blank"]. +Therefore, bean classes that don't have a https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#bean_defining_annotations[bean defining annotation, window="_blank"] are ignored. +Bean defining annotations are declared on the class-level and include scopes, stereotypes and `@Interceptor`. + +_Solution 1_: Use the `AutoAddScopeBuildItem`. This build item can be used to add a scope to a class that meets certain conditions. + +.`AutoAddScopeBuildItem` Example +[source,java] +---- +@BuildStep +AutoAddScopeBuildItem autoAddScope() { + return AutoAddScopeBuildItem.builder().containsAnnotations(SCHEDULED_NAME, SCHEDULES_NAME) <1> + .defaultScope(BuiltinScope.SINGLETON) <2> + .build(); +} +---- +<1> Find all classes annotated with `@Scheduled`. +<2> Add `@Singleton` as default scope. Classes already annotated with a scope are skipped automatically.
+ +_Solution 2_: If you need to process classes annotated with a specific annotation then it's possible to extend the set of bean defining annotations via the `BeanDefiningAnnotationBuildItem`. + +.`BeanDefiningAnnotationBuildItem` Example +[source,java] +---- +@BuildStep +BeanDefiningAnnotationBuildItem additionalBeanDefiningAnnotation() { + return new BeanDefiningAnnotationBuildItem(Annotations.GRAPHQL_API); <1> +} +---- +<1> Add `org.eclipse.microprofile.graphql.GraphQLApi` to the set of bean defining annotations. + +Bean classes added via `BeanDefiningAnnotationBuildItem` are _not removable_ by default, i.e. the resulting beans must not be removed even if they are considered unused. +However, you can change the default behavior. +See also <> and <> for more details. + +It is also possible to specify the default scope. +The default scope is only used if there is no scope declared on the bean class. + +NOTE: If no default scope is specified the `@Dependent` pseudo-scope is used. + +[[unremovable_builditem]] +=== _Reason 3_: Class Was Discovered and Has a Bean Defining Annotation but Was Removed + +The container attempts to <> during the build by default. +This optimization allows for _framework-level dead code elimination_. +In few special cases, it's not possible to correctly identify an unused bean. +In particular, Quarkus is not able to detect the usage of the `CDI.current()` static method yet. +Extensions can eliminate possible false positives by producing an `UnremovableBeanBuildItem`. + +.`UnremovableBeanBuildItem` Example +[source,java] +---- +@BuildStep +UnremovableBeanBuildItem unremovableBeans() { + return UnremovableBeanBuildItem.targetWithAnnotation(STARTUP_NAME); <1> +} +---- +<1> Make all classes annotated with `@Startup` unremovable. + +== Use Case - My Annotation Is Not Recognized as a Qualifier or an Interceptor Binding + +It is likely that the annotation class is not part of the application index. +For example, classes from the _runtime module_ of a Quarkus extension are not indexed automatically. + +_Solution_: Use the `AdditionalBeanBuildItem` as described in <>. + +[[annotations_transformer_build_item]] +== Use Case - I Need To Transform Metadata + +In some cases, it's useful to be able to modify the metadata. +Quarkus provides a powerful alternative to https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#process_annotated_type[`javax.enterprise.inject.spi.ProcessAnnotatedType`, window="_blank"]. +With an `AnnotationsTransformerBuildItem` it's possible to override the annotations that exist on bean classes. + +For example, you might want to add an interceptor binding to a specific bean class. +Here is how to do it: + +.`AnnotationsTransformerBuildItem` Example +[source,java] +---- +@BuildStep +AnnotationsTransformerBuildItem transform() { + return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { + + public boolean appliesTo(org.jboss.jandex.AnnotationTarget.Kind kind) { + return kind == org.jboss.jandex.AnnotationTarget.Kind.CLASS; <1> + } + + public void transform(TransformationContext context) { + if (context.getTarget().asClass().name().toString().equals("org.acme.Bar")) { + context.transform().add(MyInterceptorBinding.class).done(); <2> + } + } + }); +} +---- +<1> The transformer is only applied to classes. +<2> If the class name equals to `org.acme.Bar` then add `@MyInterceptorBinding`. Don't forget to invoke `Transformation#done()`. + +NOTE: Keep in mind that annotation transformers must be produced _before_ the bean discovery starts. 
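+
+A transformer can also remove annotations. The following sketch is a hypothetical variant of the example above: it drops a made-up binding, `org.acme.MyLegacyBinding`, from the `org.acme.Bar` class, and it assumes that the `Transformation` object returned from `context.transform()` also exposes a predicate-based `remove()` method alongside the `add()` and `removeAll()` methods shown in this guide.
+
+.Annotation Removal Sketch
+[source,java]
+----
+@BuildStep
+AnnotationsTransformerBuildItem removeLegacyBinding() {
+    return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() {
+
+        public boolean appliesTo(org.jboss.jandex.AnnotationTarget.Kind kind) {
+            // only class-level annotations are inspected
+            return kind == org.jboss.jandex.AnnotationTarget.Kind.CLASS;
+        }
+
+        public void transform(TransformationContext context) {
+            if (context.getTarget().asClass().name().toString().equals("org.acme.Bar")) {
+                // org.acme.MyLegacyBinding is a made-up annotation used only for illustration
+                context.transform()
+                        .remove(a -> a.name().toString().equals("org.acme.MyLegacyBinding"))
+                        .done();
+            }
+        }
+    });
+}
+----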
+
+Build steps can query the transformed annotations for a given annotation target via the `TransformedAnnotationsBuildItem`.
+
+.`TransformedAnnotationsBuildItem` Example
+[source,java]
+----
+@BuildStep
+void queryAnnotations(TransformedAnnotationsBuildItem transformedAnnotations, BuildProducer<MyBuildItem> myBuildItem) {
+    ClassInfo myClazz = ...;
+    if (transformedAnnotations.getAnnotations(myClazz).isEmpty()) { <1>
+        myBuildItem.produce(new MyBuildItem());
+    }
+}
+----
+<1> `TransformedAnnotationsBuildItem.getAnnotations()` will return a possibly transformed set of annotations.
+
+NOTE: There are other build items specialized in transformation: <> and <>.
+
+[[inspect_beans]]
+== Use Case - Inspect Beans, Observers and Injection Points
+
+=== _Solution 1_: `BeanDiscoveryFinishedBuildItem`
+
+Consumers of `BeanDiscoveryFinishedBuildItem` can easily inspect all class-based beans, observers and injection points registered in the application.
+However, synthetic beans and observers are _not included_ because this build item is produced _before_ the synthetic components are registered.
+
+Additionally, the bean resolver returned from `BeanDiscoveryFinishedBuildItem#getBeanResolver()` can be used to apply the type-safe resolution rules, e.g. to find out whether there is a bean that would satisfy a certain combination of required type and qualifiers.
+
+.`BeanDiscoveryFinishedBuildItem` Example
+[source,java]
+----
+@BuildStep
+void doSomethingWithNamedBeans(BeanDiscoveryFinishedBuildItem beanDiscovery, BuildProducer<NamedBeansBuildItem> namedBeans) {
+    List<BeanInfo> beans = beanDiscovery.beanStream().withName().collect(toList()); <1>
+    namedBeans.produce(new NamedBeansBuildItem(beans));
+}
+----
+<1> The resulting list will not contain `@Named` synthetic beans.
+
+=== _Solution 2_: `SynthesisFinishedBuildItem`
+
+Consumers of `SynthesisFinishedBuildItem` can easily inspect all beans, observers and injection points registered in the application. Synthetic beans and observers are included because this build item is produced _after_ the synthetic components are registered.
+
+Additionally, the bean resolver returned from `SynthesisFinishedBuildItem#getBeanResolver()` can be used to apply the type-safe resolution rules, e.g. to find out whether there is a bean that would satisfy a certain combination of required type and qualifiers.
+
+.`SynthesisFinishedBuildItem` Example
+[source,java]
+----
+@BuildStep
+void doSomethingWithNamedBeans(SynthesisFinishedBuildItem synthesisFinished, BuildProducer<NamedBeansBuildItem> namedBeans) {
+    List<BeanInfo> beans = synthesisFinished.beanStream().withName().collect(toList()); <1>
+    namedBeans.produce(new NamedBeansBuildItem(beans));
+}
+----
+<1> The resulting list will contain `@Named` synthetic beans.
+
+[[synthetic_beans]]
+== Use Case - The Need for Synthetic Beans
+
+Sometimes it is practical to be able to register a _synthetic bean_.
+Bean attributes of a synthetic bean are not derived from a Java class, method or field.
+Instead, all the attributes are defined by an extension.
+In regular CDI, this could be achieved using the https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#after_bean_discovery[`AfterBeanDiscovery.addBean()`, window="_blank"] methods.
+
+_Solution_: If you need to register a synthetic bean, use the `SyntheticBeanBuildItem`.
+ +.`SyntheticBeanBuildItem` Example 1 +[source,java] +---- +@BuildStep +SyntheticBeanBuildItem syntheticBean() { + return SyntheticBeanBuildItem.configure(String.class) + .qualifiers(new MyQualifierLiteral()) + .creator(mc -> mc.returnValue(mc.load("foo"))) <1> + .done(); +} +---- +<1> Generate the bytecode of the `javax.enterprise.context.spi.Contextual#create(CreationalContext)` implementation. + +The output of a bean configurator is recorded as bytecode. +Therefore, there are some limitations in how a synthetic bean instance is created at runtime. +You can: + +1. Generate the bytecode of the `Contextual#create(CreationalContext)` method directly via `ExtendedBeanConfigurator.creator(Consumer)`. +2. Pass a `io.quarkus.arc.BeanCreator` implementation class via `ExtendedBeanConfigurator#creator(Class>)`, and possibly specify some parameters via `ExtendedBeanConfigurator#param()`. +3. Produce the runtime instance through a proxy returned from a <> and set it via `ExtendedBeanConfigurator#runtimeValue(RuntimeValue)` or `ExtendedBeanConfigurator#supplier(Supplier)`. + +.`SyntheticBeanBuildItem` Example 2 +[source,java] +---- +@BuildStep +@Record(STATIC_INIT) <1> +SyntheticBeanBuildItem syntheticBean(TestRecorder recorder) { + return SyntheticBeanBuildItem.configure(Foo.class).scope(Singleton.class) + .runtimeValue(recorder.createFoo()) <2> + .done(); +} +---- +<1> By default, a synthetic bean is initialized during `STATIC_INIT`. +<2> The bean instance is supplied by a value returned from a recorder method. + +It is possible to mark a synthetic bean to be initialized during `RUNTIME_INIT`. +See the <> for more information about the difference between `STATIC_INIT` and `RUNTIME_INIT`. + +.`RUNTIME_INIT` `SyntheticBeanBuildItem` Example +[source,java] +---- +@BuildStep +@Record(RUNTIME_INIT) <1> +SyntheticBeanBuildItem syntheticBean(TestRecorder recorder) { + return SyntheticBeanBuildItem.configure(Foo.class).scope(Singleton.class) + .setRuntimeInit() <2> + .runtimeValue(recorder.createFoo()) + .done(); +} +---- +<1> The recorder must be executed in the `ExecutionTime.RUNTIME_INIT` phase. +<2> The bean instance is initialized during `RUNTIME_INIT`. + +[IMPORTANT] +==== +Synthetic beans initialized during `RUNTIME_INIT` must not be accessed during `STATIC_INIT`. `RUNTIME_INIT` build steps that access a runtime-init synthetic bean should consume the `SyntheticBeansRuntimeInitBuildItem`: + +[source,java] +---- +@BuildStep +@Record(RUNTIME_INIT) +@Consume(SyntheticBeansRuntimeInitBuildItem.class) <1> +void accessFoo(TestRecorder recorder) { + recorder.foo(); <2> +} +---- +<1> This build step must be executed after `syntheticBean()` completes. +<2> This recorder method results in an invocation upon the `Foo` bean instance and thus we need to make sure that the build step is executed after all synthetic beans are initialized. +==== + +NOTE: It is also possible to use the `BeanRegistrationPhaseBuildItem` to register a synthetic bean. However, we recommend extension authors to stick with `SyntheticBeanBuildItem` which is more idiomatic for Quarkus. + +[[synthetic_observers]] +== Use Case - Synthetic Observers + +Similar to <>, the attributes of a synthetic observer method are not derived from a Java method. Instead, all the attributes are defined by an extension. + +_Solution_: If you need to register a synthetic observer, use the `ObserverRegistrationPhaseBuildItem`. 
+ +IMPORTANT: A build step that consumes the `ObserverRegistrationPhaseBuildItem` should always produce an `ObserverConfiguratorBuildItem` or at least inject a `BuildProducer` for this build item, otherwise it could be ignored or processed at the wrong time (e.g. after the correct CDI bootstrap phase). + +.`ObserverRegistrationPhaseBuildItem` Example +[source,java] +---- +@BuildStep +void syntheticObserver(ObserverRegistrationPhaseBuildItem observerRegistrationPhase, + BuildProducer myBuildItem, + BuildProducer observerConfigurators) { + observerConfigurators.produce(new ObserverConfiguratorBuildItem(observerRegistrationPhase.getContext() + .configure() + .beanClass(DotName.createSimple(MyBuildStep.class.getName())) + .observedType(String.class) + .notify(mc -> { + // do some gizmo bytecode generation... + }))); + myBuildItem.produce(new MyBuildItem()); +} +---- + +The output of a `ObserverConfigurator` is recorded as bytecode. +Therefore, there are some limitations in how a synthetic observer is invoked at runtime. +Currently, you must generate the bytecode of the method body directly. + +[[generated_beans]] +== Use Case - I Have a Generated Bean Class + +No problem. +You can generate the bytecode of a bean class manually and then all you need to do is to produce a `GeneratedBeanBuildItem` instead of `GeneratedClassBuildItem`. + +.`GeneratedBeanBuildItem` Example +[source,java] +---- +@BuildStep +void generatedBean(BuildProducer generatedBeans) { + ClassOutput beansClassOutput = new GeneratedBeanGizmoAdaptor(generatedBeans); <1> + ClassCreator beanClassCreator = ClassCreator.builder().classOutput(beansClassOutput) + .className("org.acme.MyBean") + .build(); + beanClassCreator.addAnnotation(Singleton.class); + beanClassCreator.close(); <2> +} +---- +<1> `io.quarkus.arc.deployment.GeneratedBeanGizmoAdaptor` makes it easy to produce ``GeneratedBeanBuildItem``s from Gizmo constructs. +<2> The resulting bean class is something like `public class @Singleton MyBean { }`. + +== Use Case - I Need to Validate the Deployment + +Sometimes extensions need to inspect the beans, observers and injection points, then perform additional validations and fail the build if something is wrong. + +_Solution_: If an extension needs to validate the deployment it should use the `ValidationPhaseBuildItem`. + +IMPORTANT: A build step that consumes the `ValidationPhaseBuildItem` should always produce a `ValidationErrorBuildItem` or at least inject a `BuildProducer` for this build item, otherwise it could be ignored or processed at the wrong time (e.g. after the correct CDI bootstrap phase). + +[source,java] +---- +@BuildStep +void validate(ValidationPhaseBuildItem validationPhase, + BuildProducer myBuildItem, + BuildProducer errors) { + if (someCondition) { + errors.produce(new ValidationErrorBuildItem(new IllegalStateException())); + myBuildItem.produce(new MyBuildItem()); + } +} +---- + +TIP: You can easily filter all registered beans via the convenient `BeanStream` returned from the `ValidationPhaseBuildItem.getContext().beans()` method. + +[[custom_context]] +== Use Case - Register a Custom CDI Context + +Sometimes extensions need to extend the set of built-in CDI contexts. + +_Solution_: If you need to register a custom context, use the `ContextRegistrationPhaseBuildItem`. 
+ +IMPORTANT: A build step that consumes the `ContextRegistrationPhaseBuildItem` should always produce a `ContextConfiguratorBuildItem` or at least inject a `BuildProducer` for this build item, otherwise it could be ignored or processed at the wrong time (e.g. after the correct CDI bootstrap phase). + +`ContextRegistrationPhaseBuildItem` Example +[source,java] +---- +@BuildStep +ContextConfiguratorBuildItem registerContext(ContextRegistrationPhaseBuildItem phase) { + return new ContextConfiguratorBuildItem(phase.getContext().configure(TransactionScoped.class).normal().contextClass(TransactionContext.class)); +} +---- + +Additionally, each extension that registers a custom CDI context via `ContextRegistrationPhaseBuildItem` should also produce the `CustomScopeBuildItem` in order to contribute the custom scope annotation name to the set of bean defining annotations. + +`CustomScopeBuildItem` Example +[source,java] +---- +@BuildStep +CustomScopeBuildItem customScope() { + return new CustomScopeBuildItem(DotName.createSimple(TransactionScoped.class.getName())); +} +---- + +=== What if I Need to Know All the Scopes Used in the Application? + +_Solution_: You can inject the `CustomScopeAnnotationsBuildItem` in a build step and use the convenient methods such as `CustomScopeAnnotationsBuildItem.isScopeDeclaredOn()`. + +[[additional_interceptor_bindings]] +== Use Case - Additional Interceptor Bindings + +In rare cases it might be handy to programmatically register an existing annotation that is not annotated with `@javax.interceptor.InterceptorBinding` as an interceptor binding. +This is similar to what CDI achieves through `BeforeBeanDiscovery#addInterceptorBinding()`. +We are going to use `InterceptorBindingRegistrarBuildItem` to get it done. + +.`InterceptorBindingRegistrarBuildItem` Example +[source,java] +---- +@BuildStep +InterceptorBindingRegistrarBuildItem addInterceptorBindings() { + return new InterceptorBindingRegistrarBuildItem(new InterceptorBindingRegistrar() { + @Override + public Map> registerAdditionalBindings() { + return Collections.singletonMap(DotName.createSimple(NotAnInterceptorBinding.class.getName()), + Collections.emptySet()); + } + }); +} +---- + +== Use Case - Additional Qualifiers + +Sometimes it might be useful to register an existing annotation that is not annotated with `@javax.inject.Qualifier` as a CDI qualifier. +This is similar to what CDI achieves through `BeforeBeanDiscovery#addQualifier()`. +We are going to use `QualifierRegistrarBuildItem` to get it done. + +.`QualifierRegistrarBuildItem` Example +[source,java] +---- +@BuildStep +QualifierRegistrarBuildItem addQualifiers() { + return new QualifierRegistrarBuildItem(new QualifierRegistrar() { + @Override + public Map> getAdditionalQualifiers() { + return Collections.singletonMap(DotName.createSimple(NotAQualifier.class.getName()), + Collections.emptySet()); + } + }); +} +---- + +[[injection_point_transformation]] +== Use Case - Injection Point Transformation + +Every now and then it is handy to be able to change the qualifiers of an injection point programmatically. +You can do just that with `InjectionPointTransformerBuildItem`. 
+The following sample shows how to apply transformation to injection points with type `Foo` that contain qualifier `MyQualifier`: + +.`InjectionPointTransformerBuildItem` Example +[source,java] +---- +@BuildStep +InjectionPointTransformerBuildItem transformer() { + return new InjectionPointTransformerBuildItem(new InjectionPointsTransformer() { + + public boolean appliesTo(Type requiredType) { + return requiredType.name().equals(DotName.createSimple(Foo.class.getName())); + } + + public void transform(TransformationContext context) { + if (context.getQualifiers().stream() + .anyMatch(a -> a.name().equals(DotName.createSimple(MyQualifier.class.getName())))) { + context.transform() + .removeAll() + .add(DotName.createSimple(MyOtherQualifier.class.getName())) + .done(); + } + } + }); +} +---- + +NOTE: In theory, you can use <> to achieve the same goal. However, there are few differences that make `InjectionPointsTransformer` more suitable for this particular task: (1) annotation transformers are applied to all classes during bean discovery, whereas `InjectionPointsTransformer` is only applied to discovered injection points after bean discovery; (2) with `InjectionPointsTransformer` you don't need to handle various types of injection points (field, parameters of initializer methods, etc.). + +== Use Case - Resource Annotations and Injection + +The `ResourceAnnotationBuildItem` can be used to specify resource annotations that make it possible to resolve non-CDI injection points, such as Jakarta EE resources. +An integrator must also provide a corresponding `io.quarkus.arc.ResourceReferenceProvider` service provider implementation. + +.`ResourceAnnotationBuildItem` Example +[source,java] +---- +@BuildStep +void setupResourceInjection(BuildProducer resourceAnnotations, BuildProducer resources) { + resources.produce(new GeneratedResourceBuildItem("META-INF/services/io.quarkus.arc.ResourceReferenceProvider", + MyResourceReferenceProvider.class.getName().getBytes())); + resourceAnnotations.produce(new ResourceAnnotationBuildItem(DotName.createSimple(MyAnnotation.class.getName()))); +} +---- + +[[build_metadata]] +== Available Build Time Metadata + +Any of the above extensions that operates with `BuildExtension.BuildContext` can leverage certain build time metadata that are generated during build. +The built-in keys located in `io.quarkus.arc.processor.BuildExtension.Key` are: + +ANNOTATION_STORE:: Contains an `AnnotationStore` that keeps information about all `AnnotationTarget` annotations after application of annotation transformers +INJECTION_POINTS:: `Collection` containing all injection points +BEANS:: `Collection` containing all beans +REMOVED_BEANS:: `Collection` containing all the removed beans; see <> for more information +OBSERVERS:: `Collection` containing all observers +SCOPES:: `Collection` containing all scopes, including custom ones +QUALIFIERS:: `Map` containing all qualifiers +INTERCEPTOR_BINDINGS:: `Map` containing all interceptor bindings +STEREOTYPES:: `Map` containing all stereotypes + +To get hold of these, simply query the extension context object for given key. +Note that these metadata are made available as build proceeds which means that extensions can only leverage metadata that were built before the extensions are invoked. +If your extension attempts to retrieve metadata that wasn't yet produced, `null` will be returned. 
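+
+For illustration, here is a minimal sketch of such a query, shown inside a `BeanRegistrar` (chosen arbitrarily for the example). It assumes that the registrar's `RegistrationContext` extends `BuildExtension.BuildContext` and therefore offers a `get()` accessor for the keys listed above:
+
+.Build Time Metadata Query Sketch
+[source,java]
+----
+public void register(RegistrationContext context) {
+    // returns null if this metadata has not been produced yet
+    Collection<BeanInfo> beans = context.get(BuildExtension.Key.BEANS);
+    if (beans != null) {
+        // class-based beans only at this point, see the summary below
+        beans.forEach(bean -> System.out.println("Found bean: " + bean));
+    }
+}
+----
+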
+Here is a summary of which extensions can access which metadata: + +AnnotationsTransformer:: Shouldn't rely on any metadata as it could be used at any time in any phase of the bootstrap +ContextRegistrar:: Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES` +InjectionPointsTransformer:: Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES` +ObserverTransformer:: Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES` +BeanRegistrar:: Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES`, `BEANS` (class-based beans only), `OBSERVERS` (class-based observers only), `INJECTION_POINTS` +ObserverRegistrar:: Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES`, `BEANS`, `OBSERVERS` (class-based observers only), `INJECTION_POINTS` +BeanDeploymentValidator:: Has access to all build metadata diff --git a/docs/src/main/asciidoc/cdi-reference.adoc b/docs/src/main/asciidoc/cdi-reference.adoc index 8ff45199a1805..43d9a167a039f 100644 --- a/docs/src/main/asciidoc/cdi-reference.adoc +++ b/docs/src/main/asciidoc/cdi-reference.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Contexts and Dependency Injection @@ -11,7 +11,7 @@ include::./attributes.adoc[] :sectnumlevels: 4 :toc: -Quarkus DI solution is based on the http://docs.jboss.org/cdi/spec/2.0/cdi-spec.html[Contexts and Dependency Injection for Java 2.0, window="_blank"] specification. +Quarkus DI solution (also called ArC) is based on the http://docs.jboss.org/cdi/spec/2.0/cdi-spec.html[Contexts and Dependency Injection for Java 2.0, window="_blank"] specification. However, it is not a full CDI implementation verified by the TCK. Only a subset of the CDI features is implemented - see also <> and <>. @@ -67,6 +67,15 @@ To generate the index just add the following to your `pom.xml`: ---- +If you are are using gradle, you can apply the following plugin to your `build.gradle`: + +[source,groovy] +---- +plugins { + id 'org.kordamp.gradle.jandex' version '0.6.0' +} +---- + If you can't modify the dependency, you can still index it by adding `quarkus.index-dependency` entries to your `application.properties`: [source,properties] @@ -78,6 +87,15 @@ quarkus.index-dependency..classifier=(this one is optional) For example, the following entries ensure that the `org.acme:acme-api` dependency is indexed: +.Example application.properties +[source,properties] +---- +quarkus.index-dependency.acme.group-id=org.acme <1> +quarkus.index-dependency.acme.artifact-id=acme-api <2> +---- +<1> Value is a group id for a dependency identified by name `acme`. +<2> Value is an artifact id for a dependency identified by name `acme`. + === How To Exclude Types and Dependencies from Discovery It may happen that some beans from third-party libraries do not work correctly in Quarkus. @@ -118,7 +136,7 @@ quarkus.arc.exclude-dependency.acme.artifact-id=acme-services <2> == Native Executables and Private Members Quarkus is using GraalVM to build a native executable. -One of the limitations of GraalVM is the usage of https://github.com/oracle/graal/blob/master/substratevm/LIMITATIONS.md#user-content-reflection[Reflection, window="_blank"]. 
+One of the limitations of GraalVM is the usage of https://github.com/oracle/graal/blob/master/substratevm/Limitations.md#reflection[Reflection, window="_blank"]. Reflective operations are supported but all relevant members must be registered for reflection explicitly. Those registrations result in a bigger native executable. @@ -250,6 +268,7 @@ public class PingResource { <2> Injection itself does not result in the instantiation of `CoolService`. A client proxy is injected. <3> The first invocation upon the injected proxy triggers the instantiation of `CoolService`. +[[startup_event]] ==== Startup Event However, if you really need to instantiate a bean eagerly you can: @@ -483,7 +502,7 @@ Quarkus adds a capability that CDI currently does not support which is to condit via the `@io.quarkus.arc.profile.IfBuildProfile` and `@io.quarkus.arc.profile.UnlessBuildProfile` annotations. When used in conjunction with `@io.quarkus.arc.DefaultBean`, these annotations allow for the creation of different bean configurations for different build profiles. -Imagine for instance that an application contains a bean named `Tracer`, which needs to be do nothing when in tests or dev-mode, but work in its normal capacity for the production artifact. +Imagine for instance that an application contains a bean named `Tracer`, which needs to be do nothing when in tests or dev-mode, but works in its normal capacity for the production artifact. An elegant way to create such beans is the following: [source,java] @@ -505,7 +524,7 @@ public class TracerConfiguration { } ---- -If instead, it is required that the `Tracer` bean also work in dev-mode and only default to doing nothing for tests, then `@UnlessBuildProfile` would be ideal. The code would look like: +If instead, it is required that the `Tracer` bean also works in dev-mode and only default to doing nothing for tests, then `@UnlessBuildProfile` would be ideal. The code would look like: [source,java] ---- @@ -513,13 +532,13 @@ If instead, it is required that the `Tracer` bean also work in dev-mode and only public class TracerConfiguration { @Produces - @DefaultBean + @UnlessBuildProfile("test") // this will be enabled for both prod and dev build time profiles public Tracer realTracer(Reporter reporter, Configuration configuration) { return new RealTracer(reporter, configuration); } @Produces - @UnlessBuildProfile("prod") // this will be enabled for both test and dev build time profiles + @DefaultBean public Tracer noopTracer() { return new NoopTracer(); } @@ -531,7 +550,7 @@ NOTE: The runtime profile has absolutely no effect on the bean resolution using === Enabling Beans for Quarkus Build Properties Quarkus adds a capability that CDI currently does not support which is to conditionally enable a bean when a Quarkus build time property has a specific value, -via the `@io.quarkus.arc.properties.IfBuildProperty` annotation. +via the `@io.quarkus.arc.properties.IfBuildProperty` and `@io.quarkus.arc.properties.UnlessBuildProperty` annotation. When used in conjunction with `@io.quarkus.arc.DefaultBean`, this annotation allow for the creation of different bean configurations for different build properties. The scenario we mentioned above with `Tracer` could also be implemented in the following way: @@ -555,6 +574,27 @@ public class TracerConfiguration { } ---- +If instead, it is required that the `RealTracer` bean is only used if the `some.tracer.enabled` property is not `false`, then `@UnlessBuildProperty` would be ideal. 
The code would look like: + +[source,java] +---- +@Dependent +public class TracerConfiguration { + + @Produces + @UnlessBuildProperty(name = "some.tracer.enabled", stringValue = "false") + public Tracer realTracer(Reporter reporter, Configuration configuration) { + return new RealTracer(reporter, configuration); + } + + @Produces + @DefaultBean + public Tracer noopTracer() { + return new NoopTracer(); + } +} +---- + NOTE: Properties set at runtime have absolutely no effect on the bean resolution using `@IfBuildProperty`. === Declaring Selected Alternatives @@ -635,7 +675,7 @@ It's possible to annotate a non-private static method with an interceptor bindin class Services { @Logged <1> - static BigDecimal computePrice(long amout) { <2> + static BigDecimal computePrice(long amount) { <2> BigDecimal price; // Perform computations... return price; @@ -681,7 +721,7 @@ import io.quarkus.arc.Lock; @ApplicationScoped class SharedService { - void addAmount(BigDecimal amout) { + void addAmount(BigDecimal amount) { // ...changes some internal state of the bean } @@ -695,368 +735,75 @@ class SharedService { <2> `@Lock(Lock.Type.READ)` overrides the value specified at class level. It means that any number of clients can invoke the method concurrently, unless the bean instance is locked by `@Lock(Lock.Type.WRITE)`. <3> You can also specify the "wait time". If it's not possible to acquire the lock in the given time a `LockException` is thrown. -[[build_time_apis]] -== Build Time Extension Points - -[[portable_extensions]] -=== Portable Extensions - -Quarkus incorporates build-time optimizations in order to provide instant startup and low memory footprint. -The downside of this approach is that CDI Portable Extensions cannot be supported. -Nevertheless, most of the functionality can be achieved using Quarkus link:writing-extensions[extensions]. - -=== Additional Bean Defining Annotations - -As described in <> bean classes that don’t have a bean defining annotation are not discovered. -However, `BeanDefiningAnnotationBuildItem` can be used to extend the set of default bean defining annotations (`@Dependent`, `@Singleton`, `@ApplicationScoped`, `@RequestScoped` and `@Stereotype` annotations): - -[source,java] ----- -@BuildStep -BeanDefiningAnnotationBuildItem additionalBeanDefiningAnnotation() { - return new BeanDefiningAnnotationBuildItem(DotName.createSimple("javax.ws.rs.Path"))); -} ----- - -NOTE: Bean registrations that are result of a `BeanDefiningAnnotationBuildItem` are unremovable by default. See also <>. - -=== Resource Annotations - -`ResourceAnnotationBuildItem` is used to specify resource annotations that make it possible to resolve non-CDI injection points, such as Java EE resources. - -NOTE: An integrator must also provide a corresponding `io.quarkus.arc.ResourceReferenceProvider` implementation. - -[source,java] ----- -@BuildStep -void setupResourceInjection(BuildProducer resourceAnnotations, BuildProducer resources) { - resources.produce(new GeneratedResourceBuildItem("META-INF/services/io.quarkus.arc.ResourceReferenceProvider", - JPAResourceReferenceProvider.class.getName().getBytes())); - resourceAnnotations.produce(new ResourceAnnotationBuildItem(DotName.createSimple(PersistenceContext.class.getName()))); -} ----- - -[[additional_beans]] -=== Additional Beans - -`AdditionalBeanBuildItem` is used to specify additional bean classes to be analyzed during discovery. -Additional bean classes are transparently added to the application index processed by the container. 
- -[source,java] ----- -@BuildStep -List additionalBeans() { - return Arrays.asList( - new AdditionalBeanBuildItem(SmallRyeHealthReporter.class), - new AdditionalBeanBuildItem(HealthServlet.class)); -} ----- - -NOTE: A bean registration that is a result of an `AdditionalBeanBuildItem` is removable by default. See also <>. - -[[synthetic_beans]] -=== Synthetic Beans - -Sometimes it is very useful to be able to register a synthetic bean. -Bean attributes of a synthetic bean are not derived from a java class, method or field. -Instead, the attributes are specified by an extension. -In CDI, this could be achieved using the `AfterBeanDiscovery.addBean()` methods. -In Quarkus, there are three ways to register a synthetic bean. - -==== `BeanRegistrarBuildItem` - -A build step can produce a `BeanRegistrarBuildItem` and leverage the `io.quarkus.arc.processor.BeanConfigurator` API to build a synthetic bean definition. - -.`BeanRegistrarBuildItem` Example -[source,java] ----- -@BuildStep -BeanRegistrarBuildItem syntheticBean() { - return new BeanRegistrarBuildItem(new BeanRegistrar() { - - @Override - public void register(RegistrationContext registrationContext) { - registrationContext.configure(String.class).types(String.class).qualifiers(new MyQualifierLiteral()).creator(mc -> mc.returnValue(mc.load("foo"))).done(); - } - })); -} ----- - -NOTE: The output of a `BeanConfigurator` is recorded as bytecode. Therefore there are some limitations in how a synthetic bean instance is created. See also `BeanConfigurator.creator()` methods. - -TIP: You can easily filter all class-based beans via the convenient `BeanStream` returned from the `RegistrationContext.beans()` method. - -==== `BeanRegistrationPhaseBuildItem` - -If a build step *needs to produce other build items during the registration* it should use the `BeanRegistrationPhaseBuildItem`. -The reason is that injected objects are only valid during a `@BuildStep` method invocation. - -.`BeanRegistrationPhaseBuildItem` Example -[source,java] ----- -@BuildStep -void syntheticBean(BeanRegistrationPhaseBuildItem beanRegistrationPhase, - BuildProducer myBuildItem, - BuildProducer beanConfigurators) { - beanConfigurators.produce(new BeanConfiguratorBuildItem(beanRegistrationPhase.getContext().configure(String.class).types(String.class).qualifiers(new MyQualifierLiteral()).creator(mc -> mc.returnValue(mc.load("foo"))))); - myBuildItem.produce(new MyBuildItem()); -} ----- - -NOTE: See the `BeanRegistrationPhaseBuildItem` javadoc for more information. - -==== `SyntheticBeanBuildItem` - -Finally, a build step can produce a `SyntheticBeanBuildItem` to register a synthetic bean whose instance can be easily *produced through a <>*. -The extended `BeanConfigurator` accepts either a `io.quarkus.runtime.RuntimeValue` or a `java.util.function.Supplier`. - -.`SyntheticBeanBuildItem` Example -[source,java] ----- -@BuildStep -@Record(STATIC_INIT) -SyntheticBeanBuildItem syntheticBean(TestRecorder recorder) { - return SyntheticBeanBuildItem.configure(Foo.class).scope(Singleton.class) - .runtimeValue(recorder.createFoo()) - .done(); -} ----- - - -=== Annotation Transformations - -A very common task is to override the annotations found on the bean classes. -For example you might want to add an interceptor binding to a specific bean class. 
-Here is how to do it - use the `AnnotationsTransformerBuildItem`: - -[source,java] ----- -@BuildStep -AnnotationsTransformerBuildItem transform() { - return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { - - public boolean appliesTo(org.jboss.jandex.AnnotationTarget.Kind kind) { - return kind == org.jboss.jandex.AnnotationTarget.Kind.CLASS; - } - - public void transform(TransformationContext context) { - if (context.getTarget().asClass().name().toString().equals("com.foo.Bar")) { - context.transform().add(MyInterceptorBinding.class).done(); - } - } - }); -} ----- - -=== Additional Interceptor Bindings - -In rare cases it might be handy to programmatically register an existing annotation as interceptor binding. -This is similar to what pure CDI achieves through `BeforeBeanDiscovery#addInterceptorBinding()`. -Though here we are going to use `InterceptorBindingRegistrarBuildItem` to get it done. -Note that you can register multiple annotations in one go: - -[source,java] ----- -@BuildStep -InterceptorBindingRegistrarBuildItem addInterceptorBindings() { - InterceptorBindingRegistrarBuildItem additionalBindingsRegistrar = new InterceptorBindingRegistrarBuildItem(new InterceptorBindingRegistrar() { - @Override - public Collection registerAdditionalBindings() { - Collection result = new HashSet<>(); - result.add(DotName.createSimple(MyAnnotation.class.getName())); - result.add(DotName.createSimple(MyOtherAnnotation.class.getName())); - return result; - } - }); - return additionalBindingsRegistrar; -} ----- - -=== Injection Point Transformation - -Every now and then it is handy to be able to change qualifiers of an injection point programmatically. -You can do just that with `InjectionPointTransformerBuildItem`. -The following sample shows how to apply transformation to injection points with type `Foo` that contain qualifier `MyQualifier`: - -[source,java] ----- -@BuildStep -InjectionPointTransformerBuildItem transformer() { - return new InjectionPointTransformerBuildItem(new InjectionPointsTransformer() { - - public boolean appliesTo(Type requiredType) { - return requiredType.name().equals(DotName.createSimple(Foo.class.getName())); - } - - public void transform(TransformationContext context) { - if (context.getQualifiers().stream() - .anyMatch(a -> a.name().equals(DotName.createSimple(MyQualifier.class.getName())))) { - context.transform() - .removeAll() - .add(DotName.createSimple(MyOtherQualifier.class.getName())) - .done(); - } - } - }); -} ----- +=== Repeatable interceptor bindings -=== Observer Transformation +Quarkus has limited support for `@Repeatable` interceptor binding annotations. -Any https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#observer_methods[observer method] definition can be vetoed or transformed using an `ObserverTransformerBuildItem`. -The attributes that can be transformed include: +When binding an interceptor to a component, you can declare multiple `@Repeatable` annotations on methods. +Repeatable interceptor bindings declared on classes and stereotypes are not supported, because there are some open questions around interactions with the Interceptors specification. +This might be added in the future. 
-- https://docs.jboss.org/cdi/api/2.0/javax/enterprise/inject/spi/ObserverMethod.html#getObservedQualifiers--[qualifiers] -- https://docs.jboss.org/cdi/api/2.0/javax/enterprise/inject/spi/ObserverMethod.html#getReception--[reception] -- https://docs.jboss.org/cdi/api/2.0/javax/enterprise/inject/spi/ObserverMethod.html#getPriority--[priority] -- https://docs.jboss.org/cdi/api/2.0/javax/enterprise/inject/spi/ObserverMethod.html#getTransactionPhase--[transaction phase] -- https://docs.jboss.org/cdi/api/2.0/javax/enterprise/inject/spi/ObserverMethod.html#isAsync--[asynchronous] +As an example, suppose we have an interceptor that clears a cache. +The corresponding interceptor binding would be called `@CacheInvalidateAll` and would be declared as `@Repeatable`. +If we wanted to clear two caches at the same time, we would add `@CacheInvalidateAll` twice: [source,java] ---- -@BuildStep -ObserverTransformerBuildItem transformer() { - return new ObserverTransformerBuildItem(new ObserverTransformer() { - - public boolean appliesTo(Type observedType, Set qualifiers) { - return observedType.name.equals(DotName.createSimple(MyEvent.class.getName())); - } - - public void transform(TransformationContext context) { - // Veto all observers of MyEvent - context.veto(); - } - }); +@ApplicationScoped +class CachingService { + @CacheInvalidateAll(cacheName = "foo") + @CacheInvalidateAll(cacheName = "bar") + void heavyComputation() { + // ... + // some computation that updates a lot of data + // and requires 2 caches to be invalidated + // ... + } } ---- -=== Bean Deployment Validation - -Once the bean deployment is ready an extension can perform additional validations and inspect the found beans, observers and injection points. -Register a `BeanDeploymentValidatorBuildItem`: - -[source,java] ----- -@BuildStep -BeanDeploymentValidatorBuildItem beanDeploymentValidator() { - return new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() { - public void validate(ValidationContext validationContext) { - for (InjectionPointInfo injectionPoint : validationContext.get(Key.INJECTION_POINTS)) { - System.out.println("Injection point: " + injectionPoint); - } - } - }); -} ----- +This is how interceptors are used. +What about creating an interceptor? -TIP: You can easily filter all registered beans via the convenient `BeanStream` returned from the `ValidationContext.beans()` method. +When declaring interceptor bindings of an interceptor, you can add multiple `@Repeatable` annotations to the interceptor class as usual. +This is useless when the annotation members are `@Nonbinding`, as would be the case for the `@Cached` annotation, but is important otherwise. -If an extension needs to produce other build items during the "validation" phase it should use the `ValidationPhaseBuildItem` instead. -The reason is that injected objects are only valid during a `@BuildStep` method invocation. +For example, suppose we have an interceptor that can automatically log method invocations to certain targets. +The interceptor binding annotation `@Logged` would have a member called `target`, which specifies where to store the log. 
+Our implementation could be restricted to console logging and file logging: [source,java] ---- -@BuildStep -void validate(ValidationPhaseBuildItem validationPhase, - BuildProducer myBuildItem, - BuildProducer errors) { - if (someCondition) { - errors.produce(new ValidationErrorBuildItem(new IllegalStateException())); - myBuildItem.produce(new MyBuildItem()); - } +@Interceptor +@Logged(target = "console") +@Logged(target = "file") +class NaiveLoggingInterceptor { + // ... } ---- -NOTE: See `ValidationPhaseBuildItem` javadoc for more information. - - -=== Custom Contexts - -An extension can register a custom `InjectableContext` implementation by means of a `ContextRegistrarBuildItem`: - -[source,java] ----- -@BuildStep -ContextRegistrarBuildItem customContext() { - return new ContextRegistrarBuildItem(new ContextRegistrar() { - public void register(RegistrationContext registrationContext) { - registrationContext.configure(CustomScoped.class).normal().contextClass(MyCustomContext.class).done(); - } - }); -} ----- +Other interceptors could be provided to log method invocations to different targets. -If an extension needs to produce other build items during the "context registration" phase it should use the `ContextRegistrationPhaseBuildItem` instead. -The reason is that injected objects are only valid during a `@BuildStep` method invocation. +[[build_time_apis]] +== Build Time Extensions -[source,java] ----- -@BuildStep -void addContext(ContextRegistrationPhaseBuildItem contextRegistrationPhase, - BuildProducer myBuildItem, - BuildProducer contexts) { - contexts.produce(new ContextConfiguratorBuildItem(contextRegistrationPhase.getContext().configure(CustomScoped.class).normal().contextClass(MyCustomContext.class))); - myBuildItem.produce(new MyBuildItem()); -} ----- +Quarkus incorporates build-time optimizations in order to provide instant startup and low memory footprint. +The downside of this approach is that CDI Portable Extensions cannot be supported. +Nevertheless, most of the functionality can be achieved using Quarkus link:writing-extensions[extensions]. +See the link:cdi-integration[integration guide] for more information. -NOTE: See `ContextRegistrationPhaseBuildItem` javadoc for more information. - - -[[build_metadata]] -=== Available Build Time Metadata - -Any of the above extensions that operates with `BuildExtension.BuildContext` can leverage certain build time metadata that are generated during build. -The built-in keys located in `io.quarkus.arc.processor.BuildExtension.Key` are: - -* `ANNOTATION_STORE` -** Contains an `AnnotationStore` that keeps information about all `AnnotationTarget` annotations after application of annotation transformers -* `INJECTION_POINTS` -** `Collection` containing all injection points -* `BEANS` -** `Collection` containing all beans -* `REMOVED_BEANS` -** `Collection` containing all the removed beans; see <> for more information -* `OBSERVERS` -** `Collection` containing all observers -* `SCOPES` -** `Collection` containing all scopes, including custom ones -* `QUALIFIERS` -** `Map` containing all qualifiers -* `INTERCEPTOR_BINDINGS` -** `Map` containing all interceptor bindings -* `STEREOTYPES` -** `Map` containing all stereotypes - -To get hold of these, simply query the extension context object for given key. -Note that these metadata are made available as build proceeds which means that extensions can only leverage metadata that were build before they are invoked. 
-If your extension attempts to retrieve metadata that wasn't yet produced, `null` will be returned. -Here is a summary of which extensions can access which metadata: - -* `AnnotationsTransformer` -** Shouldn't rely on any metadata as this is one of the first CDI extensions invoked -* `ContextRegistrar` -** Has access to `ANNOTATION_STORE` -* `InjectionPointsTransformer` -** Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES` -* `ObserverTransformer` -** Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES` -* `BeanRegistrar` -** Has access to `ANNOTATION_STORE`, `QUALIFIERS`, `INTERCEPTOR_BINDINGS`, `STEREOTYPES`, `BEANS` -* `BeanDeploymentValidator` -** Has access to all build metadata - -[[dev-mode]] == Development Mode In the development mode, two special endpoints are registered automatically to provide some basic debug info in the JSON format: -* HTTP GET `/quarkus/arc/beans` - returns the list of all beans +* HTTP GET `/q/arc` - returns the summary; number of beans, config properties, etc. +* HTTP GET `/q/arc/beans` - returns the list of all beans ** You can use query params to filter the output: -*** `scope` - include beans with scope that ends with the given value, i.e. `http://localhost:8080/quarkus/arc/beans?scope=ApplicationScoped` -*** `beanClass` - include beans with bean class that starts with the given value, i.e. `http://localhost:8080/quarkus/arc/beans?beanClass=org.acme.Foo` -*** `kind` - include beans of the specified kind (`CLASS`, `PRODUCER_FIELD`, `PRODUCER_METHOD`, `INTERCEPTOR` or `SYNTHETIC`), i.e. `http://localhost:8080/quarkus/arc/beans?kind=PRODUCER_METHOD` -* HTTP GET `/quarkus/arc/observers` - returns the list of all observer methods +*** `scope` - include beans with scope that ends with the given value, i.e. `http://localhost:8080/q/arc/beans?scope=ApplicationScoped` +*** `beanClass` - include beans with bean class that starts with the given value, i.e. `http://localhost:8080/q/arc/beans?beanClass=org.acme.Foo` +*** `kind` - include beans of the specified kind (`CLASS`, `PRODUCER_FIELD`, `PRODUCER_METHOD`, `INTERCEPTOR` or `SYNTHETIC`), i.e. `http://localhost:8080/q/arc/beans?kind=PRODUCER_METHOD` +* HTTP GET `/q/arc/removed-beans` - returns the list of unused beans removed during build +* HTTP GET `/q/arc/observers` - returns the list of all observer methods NOTE: These endpoints are only available in the development mode, i.e. when you run your application via `mvn quarkus:dev` (or `./gradlew quarkusDev`). diff --git a/docs/src/main/asciidoc/cdi.adoc b/docs/src/main/asciidoc/cdi.adoc index 4805d02c537fd..2578910ddbc18 100644 --- a/docs/src/main/asciidoc/cdi.adoc +++ b/docs/src/main/asciidoc/cdi.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Introduction to Contexts and Dependency Injection @@ -13,26 +13,26 @@ include::./attributes.adoc[] In this guide we're going to describe the basic principles of the Quarkus programming model that is based on the http://docs.jboss.org/cdi/spec/2.0/cdi-spec.html[Contexts and Dependency Injection for Java 2.0, window="_blank"] specification. -== _Q: OK. Let's start simple. What is a bean?_ +== OK. Let's start simple. What is a bean? 
-A: Well, a bean is a _container-managed_ object that supports a set of basic services, such as injection of dependencies, lifecycle callbacks and interceptors. +Well, a bean is a _container-managed_ object that supports a set of basic services, such as injection of dependencies, lifecycle callbacks and interceptors. -== _Q: Wait a minute. What does "container-managed" mean?_ +== Wait a minute. What does "container-managed" mean? -A: Simply put, you don't control the lifecycle of the object instance directly. +Simply put, you don't control the lifecycle of the object instance directly. Instead, you can affect the lifecycle through declarative means, such as annotations, configuration, etc. The container is the _environment_ where your application runs. It creates and destroys the instances of beans, associates the instances with a designated context, and injects them into other beans. -== _Q: What is it good for?_ +== What is it good for? -A: An application developer can focus on the business logic rather than finding out "where and how" to obtain a fully initialized component with all of its dependencies. +An application developer can focus on the business logic rather than finding out "where and how" to obtain a fully initialized component with all of its dependencies. NOTE: You've probably heard of the _inversion of control_ (IoC) programming principle. Dependency injection is one of the implementation techniques of IoC. -== _Q: What does a bean look like?_ +== What does a bean look like? -A: There are several kinds of beans. +There are several kinds of beans. The most common ones are class-based beans: .Simple Bean Example @@ -56,11 +56,12 @@ public class Translator { ---- <1> This is a scope annotation. It tells the container which context to associate the bean instance with. In this particular case, a *single bean instance* is created for the application and used by all other beans that inject `Translator`. <2> This is a field injection point. It tells the container that `Translator` depends on the `Dictionary` bean. If there is no matching bean the build fails. -<3> This is an interceptor binding annotation. In this case, the annotation comes from the MicroProfile Metrics. The relevant interceptor intercepts the invocation and updates the relevant metrics. We will about <> later. +<3> This is an interceptor binding annotation. In this case, the annotation comes from the MicroProfile Metrics. The relevant interceptor intercepts the invocation and updates the relevant metrics. We will talk about <> later. -== _Q: Nice. How does the dependency resolution work? I see no names or identifiers._ +[[typesafe_resolution]] +== Nice. How does the dependency resolution work? I see no names or identifiers. -A: That's a good question. +That's a good question. In CDI the process of matching a bean to an injection point is *type-safe*. Each bean declares a set of bean types. In our example above, the `Translator` bean has two bean types: `Translator` and `java.lang.Object`. @@ -68,9 +69,9 @@ Subsequently, a bean is assignable to an injection point if the bean has a bean We'll talk about qualifiers later. For now, it's enough to know that the bean above is assignable to an injection point of type `Translator` and `java.lang.Object`. -== _Q: Hm, wait a minute. What happens if multiple beans declare the same type?_ +== Hm, wait a minute. What happens if multiple beans declare the same type? 
-A: There is a simple rule: *exactly one bean must be assignable to an injection point, otherwise the build fails*. +There is a simple rule: *exactly one bean must be assignable to an injection point, otherwise the build fails*. If none is assignable the build fails with `UnsatisfiedResolutionException`. If multiple are assignable the build fails with `AmbiguousResolutionException`. This is very useful because your application fails fast whenever the container is not able to find an unambiguous dependency for any injection point. @@ -97,11 +98,11 @@ public class Translator { <2> `javax.enterprise.inject.Instance` extends `Iterable`. ==== -== _Q: Can I use setter and constructor injection?_ +== Can I use setter and constructor injection? -A: Yes, you can. +Yes, you can. In fact, in CDI the "setter injection" is superseded by more powerful https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#initializer_methods[initializer methods, window="_blank"]. -Intializers may accept multiple parameters and don't have to follow the JavaBean naming conventions. +Initializers may accept multiple parameters and don't have to follow the JavaBean naming conventions. .Initialized and Constructor Injection Example [source,java] @@ -109,7 +110,7 @@ Intializers may accept multiple parameters and don't have to follow the JavaBean @ApplicationScoped public class Translator { - private final TranslatorHelper helper + private final TranslatorHelper helper; Translator(TranslatorHelper helper) { <1> this.helper = helper; @@ -128,9 +129,9 @@ It's also not necessary to add `@Inject` if there is only one constructor presen <2> An initializer method must be annotated with `@Inject`. <3> An initializer may accept multiple parameters - each one is an injection point. -== _Q: You talked about some qualifiers?_ +== You talked about some qualifiers? -A: https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#qualifiers[Qualifiers, window="_blank"] are annotations that help the container to distinguish beans that implement the same type. +https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#qualifiers[Qualifiers, window="_blank"] are annotations that help the container to distinguish beans that implement the same type. As we already said a bean is assignable to an injection point if it has all the required qualifiers. If you declare no qualifier at an injection point the `@Default` qualifier is assumed. @@ -165,15 +166,15 @@ This bean would be assignable to `@Inject @Superior Translator` and `@Inject @Su The reason is that `@Inject Translator` is automatically transformed to `@Inject @Default Translator` during typesafe resolution. And since our `SuperiorTranslator` does not declare `@Default` only the original `Translator` bean is assignable. -== _Q: Looks good. What is the bean scope?_ +== Looks good. What is the bean scope? The scope of a bean determines the lifecycle of its instances, i.e. when and where an instance should be created and destroyed. NOTE: Every bean has exactly one scope. -== _Q: What scopes can I actually use in my Quarkus application?_ +== What scopes can I actually use in my Quarkus application? -A: You can use all the built-in scopes mentioned by the specification except for `javax.enterprise.context.ConversationScoped`. +You can use all the built-in scopes mentioned by the specification except for `javax.enterprise.context.ConversationScoped`. 
[options="header",cols="1,1"]
|===
|Annotation |Description
|`@javax.enterprise.context.ApplicationScoped` | A single bean instance is used for the application and shared among all injection points. The instance is created lazily, i.e. once a method is invoked upon the client proxy.
|`@javax.inject.Singleton` | Just like `@ApplicationScoped` except that no client proxy is used. The instance is created when an injection point that resolves to a @Singleton bean is being injected.
|`@javax.enterprise.context.RequestScoped` | The bean instance is associated with the current _request_ (usually an HTTP request).
|`@javax.enterprise.context.Dependent` | This is a pseudo-scope. The instances are not shared and every injection point spawns a new instance of the dependent bean. The lifecycle of dependent bean is bound to the bean injecting it - it will be created and destroyed along with the bean injecting it.
-|`@javax.enterprise.context.SessionScoped` | This scope is backed by an `javax.servlet.http.HttpSession` object. It's only available if the `quarkus-undertow` extension is used.
+|`@javax.enterprise.context.SessionScoped` | This scope is backed by a `javax.servlet.http.HttpSession` object. It's only available if the `quarkus-undertow` extension is used.
|===
NOTE: There can be other custom scopes provided by Quarkus extensions. For example, `quarkus-narayana-jta` provides `javax.transaction.TransactionScoped`.
+== `@ApplicationScoped` and `@Singleton` look very similar. Which one should I choose for my Quarkus application?
+
+It depends ;-).
+
+A `@Singleton` bean has no <<client_proxies,client proxy>> and hence an instance is _created eagerly_ when the bean is injected. By contrast, an instance of an `@ApplicationScoped` bean is _created lazily_, i.e.
+when a method is invoked upon an injected instance for the first time.
+
+Furthermore, client proxies only delegate method invocations and thus you should never read/write fields of an injected `@ApplicationScoped` bean directly.
+You can read/write fields of an injected `@Singleton` bean safely.
+
+`@Singleton` should have slightly better performance because there is no indirection (no proxy that delegates to the current instance from the context).
+
+On the other hand, you cannot mock `@Singleton` beans using <>.
+
+`@ApplicationScoped` beans can also be destroyed and recreated at runtime.
+Existing injection points just work because the injected proxy delegates to the current instance.
+
+Therefore, we recommend sticking with `@ApplicationScoped` by default unless there's a good reason to use `@Singleton`.
+
[[client_proxies]]
-== _Q: I don't undestand the concept of client proxies._
+== I don't understand the concept of client proxies.
Indeed, the https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#client_proxies[client proxies, window="_blank"] could be hard to grasp but they provide some useful functionality.
A client proxy is basically an object that delegates all method invocations to a target bean instance.
It's a container construct that implements `io.quarkus.arc.ClientProxy` and extends the bean class.
+IMPORTANT: Client proxies only delegate method invocations. So never read or write a field of a normal scoped bean, otherwise you will work with non-contextual or stale data.
+
.Generated Client Proxy Example
[source,java]
----
@@ -227,16 +249,16 @@ Client proxies allow for:
* In rare cases it's practical to destroy the beans manually. A direct injected reference would lead to a stale bean instance.
-== _Q: OK. You said that there are several kinds of beans?_
+== OK. You said that there are several kinds of beans?
-A: Yes. In general, we distinguish:
+Yes. In general, we distinguish:
1. Class beans
2.
Producer methods 3. Producer fields 4. Synthetic beans -NOTE: Synthetic beans are usually provided by extensions. Therefore, we not going to cover them in this guide. +NOTE: Synthetic beans are usually provided by extensions. Therefore, we are not going to cover them in this guide. Producer methods and fields are useful if you need additional control over instantiation of a bean. They are also useful when integrating third-party libraries where you don't control the class source and may not add additional annotations etc. @@ -285,9 +307,9 @@ No scope annotation is declared and so it's defaulted to `@Dependent`. There's more about producers. You can declare qualifiers, inject dependencies into the producer methods parameters, etc. -You can read more about producers for example in the https://docs.jboss.org/weld/reference/latest/en-US/html/beanscdi.html#_producer_methods[Weld docs, window="_blank"]. +You can read more about producers for example in the https://docs.jboss.org/weld/reference/latest/en-US/html/producermethods.html[Weld docs, window="_blank"]. -== _Q: OK, injection looks cool. What other services are provided?_ +== OK, injection looks cool. What other services are provided? === Lifecycle Callbacks @@ -339,9 +361,9 @@ public class LoggingInterceptor { Logger logger; @AroundInvoke <5> - Objec logInvocation(InvocationContext context) { + Object logInvocation(InvocationContext context) { // ...log before - Objec ret = context.proceed(); <6> + Object ret = context.proceed(); <6> // ...log after return ret; } @@ -375,7 +397,7 @@ class TaskCompleted { class ComplicatedService { @Inject - Event event; <1> + Event event; <1> void doSomething() { // ... diff --git a/docs/src/main/asciidoc/centralized-log-management.adoc b/docs/src/main/asciidoc/centralized-log-management.adoc index ea235ab3a159c..84561341aae5e 100644 --- a/docs/src/main/asciidoc/centralized-log-management.adoc +++ b/docs/src/main/asciidoc/centralized-log-management.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Centralized log management (Graylog, Logstash, Fluentd) @@ -23,14 +23,14 @@ The following examples will all be based on the same example application that yo Create an application with the `quarkus-logging-gelf` extension. You can use the following Maven command to create it: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=gelf-logging \ -DclassName="org.acme.quickstart.GelfLoggingResource" \ -Dpath="/gelf-logging" \ - -Dextensions="logging-gelf" + -Dextensions="resteasy,logging-gelf" ---- If you already have your Quarkus project configured, you can add the `logging-gelf` extension @@ -136,7 +136,7 @@ You can do it from the Graylog web console (System -> Input -> Select GELF UDP) This curl example will create a new Input of type GELF UDP, it uses the default login from Graylog (admin/admin). 
-[source,shell] +[source,bash] ---- curl -H "Content-Type: application/json" -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "X-Requested-By: curl" -X POST -v -d \ '{"title":"udp input","configuration":{"recv_buffer_size":262144,"bind_address":"0.0.0.0","port":12201,"decompress_size_limit":8388608},"type":"org.graylog2.inputs.gelf.udp.GELFUDPInput","global":true}' \ @@ -227,7 +227,7 @@ Launch your application, you should see your logs arriving inside the Elastic St First, you need to create a Fluentd image with the needed plugins: elasticsearch and input-gelf. You can use the following Dockerfile that should be created inside a `fluentd` directory. -[source] +[source,dockerfile] ---- FROM fluent/fluentd:v1.3-debian RUN ["gem", "install", "fluent-plugin-elasticsearch", "--version", "3.7.0"] @@ -315,7 +315,7 @@ As opposed to the GELF input, the Syslog input will not render multiline logs in First, you need to create a Fluentd image with the elasticsearch plugin. You can use the following Dockerfile that should be created inside a `fluentd` directory. -[source] +[source,dockerfile] ---- FROM fluent/fluentd:v1.3-debian RUN ["gem", "install", "fluent-plugin-elasticsearch", "--version", "3.7.0"] diff --git a/docs/src/main/asciidoc/class-loading-reference.adoc b/docs/src/main/asciidoc/class-loading-reference.adoc index 36a77678a99bd..aaf6077792429 100644 --- a/docs/src/main/asciidoc/class-loading-reference.adoc +++ b/docs/src/main/asciidoc/class-loading-reference.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Class Loading Reference @@ -19,25 +19,6 @@ a normal production Quarkus application. For all other use cases (e.g. tests, dev mode, and building the application) Quarkus uses the class loading architecture outlined here. -== Reading Class Bytecode - -It is important to use the correct `ClassLoader`. The recommended approach is to get it by calling the -`Thread.currentThread().getContextClassLoader()` method. - -Example: - - -[source,java,subs=attributes+] ----- -@BuildStep -GeneratedClassBuildItem instrument(final CombinedIndexBuildItem index) { - final String classname = "com.example.SomeClass"; - final ClassLoader cl = Thread.currentThread().getContextClassLoader(); - final byte[] originalBytecode = IoUtil.readClassAsBytes(cl, classname); - final byte[] enhancedBytecode = ... // class instrumentation from originalBytecode - return new GeneratedClassBuildItem(true, classname, enhancedBytecode)); -} ----- == Bootstrapping Quarkus @@ -191,3 +172,32 @@ file). Simply add an `excludedArtifacts` section as shown below: This should only be done if the extension depends on a newer version of these artifacts. If the extension does not bring in a replacement artifact as a dependency then classes the application needs might end up missing. + +== Configuring Class Loading + +It is possible to configure some aspects of class loading in dev and test mode. This can be done using `application.properties`. +Note that class loading config is different to normal config, in that it does not use the standard Quarkus config mechanisms +(as it is needed too early), so only supports `application.properties`. The following options are supported. 
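+
+For example, forcing an artifact to be loaded by the parent class loader in dev and test mode could look like the following sketch (check the generated table below for the exact property names; the artifact coordinates are made up):
+
+[source,properties]
+----
+# Load this artifact parent first in dev and test mode
+quarkus.class-loading.parent-first-artifacts=org.acme:my-shared-library
+----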
+ + +include::{generated-dir}/config/quarkus-class-loading-configuration-class-loading-config.adoc[opts=optional, leveloffset=+1] + +== Reading Class Bytecode + +It is important to use the correct `ClassLoader`. The recommended approach is to get it by calling the +`Thread.currentThread().getContextClassLoader()` method. + +Example: + + +[source,java,subs=attributes+] +---- +@BuildStep +GeneratedClassBuildItem instrument(final CombinedIndexBuildItem index) { + final String classname = "com.example.SomeClass"; + final ClassLoader cl = Thread.currentThread().getContextClassLoader(); + final byte[] originalBytecode = IoUtil.readClassAsBytes(cl, classname); + final byte[] enhancedBytecode = ... // class instrumentation from originalBytecode + return new GeneratedClassBuildItem(true, classname, enhancedBytecode)); +} +---- \ No newline at end of file diff --git a/docs/src/main/asciidoc/cli-tooling.adoc b/docs/src/main/asciidoc/cli-tooling.adoc index a76ad0e8108a4..c1358c0e00db8 100644 --- a/docs/src/main/asciidoc/cli-tooling.adoc +++ b/docs/src/main/asciidoc/cli-tooling.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Building Quarkus apps with Quarkus Command Line Interface (CLI) @@ -24,24 +24,32 @@ binary there. Then in your shell profile (for Bash shell edit '~/.bash_profile'), export the 'QUARKUS_HOME' folder and add that to your 'PATH': -[source,shell] +[source,bash] +---- export QUARKUS_HOME=/path/to/quarkus-cli export PATH="$PATH:$QUARKUS_HOME" +---- Reload your terminal or do: -[source,shell] +[source,bash] +---- source ~/.bash_profile +---- Now you can run the Quarkus CLI: [source,shell] +---- $ quarkus --help +---- This will display the help information with all the available commands. [source,shell] +---- $ quarkus -i +---- This will start the Quarkus CLI in interactive mode. @@ -54,7 +62,9 @@ As with the native CLI we recommend that you copy the binary to a specific folde The jar file requires Java 8 or newer to run. To start the CLI: [source,shell] +---- $ java -jar quarkus-cli.jar +---- The jar file CLI accepts all the same options and commands as the native binary. 
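+
+If you use the jar frequently, a shell alias keeps the remaining commands identical to the native CLI (the path below is just an example):
+
+[source,bash]
+----
+alias quarkus='java -jar /path/to/quarkus-cli.jar'
+----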
@@ -67,7 +77,9 @@ Note: In the examples below switch out 'quarkus' with 'java -jar quarkus-cli.jar To create a new project we use the create-project command: [source,shell] +---- $ quarkus create-project hello-world +---- This will create a folder called 'hello-world' in your current working directory using default groupId, artifactId and version values @@ -77,25 +89,33 @@ To specify the groupId, artifactId and version values, use the '--groupid', '--artifactid' and '--version' options: [source,shell] +---- $ quarkus create-project --groupid=com.foo --artifactId=bar --version=1.0 hello-world +---- Use the help option to display all the possible options: [source,shell] +---- $ quarkus create-project --help +---- == Dealing with extensions The Quarkus CLI can obtain a list of the available extensions with: [source,shell] +---- $ quarkus list-extensions +---- To more easily get an overview and only display the extension names: [source,shell] +---- $ quarkus list-extensions -n +---- == Adding extension(s) @@ -104,7 +124,9 @@ The Quarkus CLI can add Quarkus extensions to your project with the 'add-extensi command: [source,shell] +---- $ quarkus add-extension --extension=hibernate-validator /path/to/my/project +---- The argument path either needs to be the base folder for the project or a direct path to the build file. @@ -114,7 +136,9 @@ build file. To start dev mode from the Quarkus CLI do: [source,shell] +---- $ quarkus dev /path/to/my/project +---- As with 'add-extension' the argument path needs to be the base folder for the project or a direct path to the build file. diff --git a/docs/src/main/asciidoc/command-mode-reference.adoc b/docs/src/main/asciidoc/command-mode-reference.adoc index 76ae957b596d9..f6f762241e35b 100644 --- a/docs/src/main/asciidoc/command-mode-reference.adoc +++ b/docs/src/main/asciidoc/command-mode-reference.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Command Mode Applications @@ -113,7 +113,7 @@ As command mode applications will often require arguments to be passed on the co mvn compile quarkus:dev -Dquarkus.args='--help' ---- -The same can be achived with Gradle: +The same can be achieved with Gradle: ---- ./gradlew quarkusDev --quarkus-args='--help' diff --git a/docs/src/main/asciidoc/config-reference.adoc b/docs/src/main/asciidoc/config-reference.adoc new file mode 100644 index 0000000000000..0e3aaaae3e37e --- /dev/null +++ b/docs/src/main/asciidoc/config-reference.adoc @@ -0,0 +1,834 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Configuration Reference Guide + +include::./attributes.adoc[] + +:numbered: +:sectnums: +:sectnumlevels: 4 +:toc: + +In this reference guide we're going to describe various aspects of Quarkus configuration. +A Quarkus application and Quarkus itself (core and extensions) are both configured via the same mechanism that leverages the https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config] API. +Quarkus configuration is based on https://github.com/smallrye/smallrye-config[SmallRye Config], an implementation of the MicroProfile Config specification. +All of the standard features are supported. 
+Moreover, there are several additional features which are made available by the SmallRye Config project as well as by Quarkus itself.
+
+TIP: If you're looking for information on how to make a Quarkus extension configurable, see the guide about writing your own extensions.
+
+[[configuration_sources]]
+== Configuration Sources
+
+By default, Quarkus reads configuration properties from several sources (in decreasing priority):
+
+1. <<system_properties,System properties>>
+2. <<environment_variables,Environment variables>>
+3. <<env_file,A file named `.env`>> placed in the current working directory
+4. <<pwd_config_application_file,An `application.properties` file placed in `$PWD/config/`>>
+5. <<application_properties_file,An application configuration file>>, i.e. `src/main/resources/application.properties`
+
+[[system_properties]]
+=== System properties
+
+* for a runner jar: `java -Dquarkus.datasource.password=youshallnotpass -jar target/quarkus-app/quarkus-run.jar`
+* for a native executable: `./target/myapp-runner -Dquarkus.datasource.password=youshallnotpass`
+
+[[environment_variables]]
+=== Environment variables
+
+* for a runner jar: `export QUARKUS_DATASOURCE_PASSWORD=youshallnotpass ; java -jar target/quarkus-app/quarkus-run.jar`
+* for a native executable: `export QUARKUS_DATASOURCE_PASSWORD=youshallnotpass ; ./target/myapp-runner`
+
+NOTE: Environment variable names follow the conversion rules of link:https://github.com/eclipse/microprofile-config/blob/master/spec/src/main/asciidoc/configsources.asciidoc#default-configsources[Eclipse MicroProfile Config].
+
+[[env_file]]
+=== File named `.env` placed in the current working directory
+
+.Example `.env` file
+[source,properties]
+----
+QUARKUS_DATASOURCE_PASSWORD=youshallnotpass <1>
+----
+<1> The name `QUARKUS_DATASOURCE_PASSWORD` is converted using the same rules as for <<environment_variables,environment variables>>.
+
+For dev mode, this file can be placed in the root of the project, but it is advised to **not** check it in to version control.
+
+NOTE: An environment variable defined without a configuration profile in the `.env` file overrides that property for all profiles in `application.properties`, e.g. `%test.application.value` is overridden by `APPLICATION_VALUE` from the `.env` file.
+
+[[pwd_config_application_file]]
+=== An `application.properties` file placed in `$PWD/config/`
+
+By placing an `application.properties` file inside a directory named `config` which resides in the directory where the application runs, any runtime properties defined in that file will override the default configuration.
+Furthermore, any runtime properties added to this file that were not part of the original `application.properties` file
+_will also_ be taken into account.
+This works in the same way for the runner jar and the native executable.
+
+NOTE: The `config/application.properties` feature is available in development mode as well. To make use of it, `config/application.properties` needs to be placed inside the build tool's output directory (`target` for Maven and `build/classes/java/main` for Gradle).
+Keep in mind however that any cleaning operation from the build tool like `mvn clean` or `gradle clean` will remove the `config` directory as well.
+
+[[application_properties_file]]
+=== An application configuration file
+
+This is the main application configuration file located in `src/main/resources/application.properties`.
+
+.Example `application.properties` file
+[source,properties]
+----
+greeting.message=hello <1>
+quarkus.http.port=9090 <2>
+----
+<1> This is a user-defined configuration property.
+<2> This is a configuration property consumed by the `quarkus-vertx-http` extension.
+
+TIP: Quarkus supports the use of <<using_property_expressions,property expressions>> in the `application.properties` file.
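+
+As an illustration of the `$PWD/config/` mechanism described above, a file like the following placed next to the runner overrides the packaged defaults at runtime (a minimal sketch; the values are made up):
+
+[source,properties]
+----
+# config/application.properties - read when the application starts
+quarkus.http.port=8080
+greeting.message=hello from the config directory
+----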
+ +== Embedding a configuration file inside a dependency + +You can embed a configuration file inside one of your dependencies by adding a `META-INF/microprofile.properties` configuration file to it +(this is a standard functionality of MicroProfile Config). + +When you add this dependency to your application, its configuration properties will be merged. + +You can override the properties coming from it inside your `application.properties` that takes precedence over it. + +== Injecting configuration properties + +Quarkus uses https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config] annotations to inject the configuration properties in the application. + +[source,java] +---- +@ConfigProperty(name = "greeting.message") <1> +String message; +---- +<1> You can use `@Inject @ConfigProperty` or just `@ConfigProperty`. +The `@Inject` annotation is not necessary for members annotated with `@ConfigProperty`. +This behavior differs from https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config]. + +NOTE: If the application attempts to inject a configuration property that is not set, an error is thrown, thus allowing you to quickly know when your configuration is complete. + +.More `@ConfigProperty` Examples +[source,java] +---- +@ConfigProperty(name = "greeting.message") <1> +String message; + +@ConfigProperty(name = "greeting.suffix", defaultValue="!") <2> +String suffix; + +@ConfigProperty(name = "greeting.name") +Optional name; <3> +---- +<1> If you do not provide a value for this property, the application startup fails with `javax.enterprise.inject.spi.DeploymentException: No config value of type [class java.lang.String] exists for: greeting.message`. +<2> The default value is injected if the configuration does not provide a value for `greeting.suffix`. +<3> This property is optional - an empty `Optional` is injected if the configuration does not provide a value for `greeting.name`. + +== Programmatically access the configuration + +You can also access the configuration programmatically. +It can be handy to achieve dynamic lookup, or retrieve configured values from classes that are neither CDI beans or JAX-RS resources. + +You can access the configuration programmatically using `org.eclipse.microprofile.config.ConfigProvider.getConfig()` such as in: + +[source,java] +---- +String databaseName = ConfigProvider.getConfig().getValue("database.name", String.class); +Optional maybeDatabaseName = ConfigProvider.getConfig().getOptionalValue("database.name", String.class); +---- + +[[using_configproperties]] +== Using @ConfigProperties + +As an alternative to injecting multiple related configuration values in the way that was shown in the previous example, +users can also use the `@io.quarkus.arc.config.ConfigProperties` annotation to group these properties together. 
+ +For the greeting properties above, a `GreetingConfiguration` class could be created like so: + +[source,java] +---- +package org.acme.config; + +import io.quarkus.arc.config.ConfigProperties; +import java.util.Optional; + +@ConfigProperties(prefix = "greeting") <1> +public class GreetingConfiguration { + + private String message; + private String suffix = "!"; <2> + private Optional name; + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getSuffix() { + return suffix; + } + + public void setSuffix(String suffix) { + this.suffix = suffix; + } + + public Optional getName() { + return name; + } + + public void setName(Optional name) { + this.name = name; + } +} +---- +<1> `prefix` is optional. If not set then the prefix to be used will be determined by the class name. In this case it would still be `greeting` (since the `Configuration` suffix is removed). If the class were named `GreetingExtraConfiguration` then the resulting default prefix would be `greeting-extra`. +<2> `!` will be the default value if `greeting.suffix` is not set. + +This class could then be injected into the `GreetingResource` using the familiar CDI `@Inject` annotation like so: + +[source,java] +---- +@Inject +GreetingConfiguration greetingConfiguration; +---- + +Another alternative style provided by Quarkus is to create `GreetingConfiguration` as an interface like so: + +[source,java] +---- +package org.acme.config; + +import io.quarkus.arc.config.ConfigProperties; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import java.util.Optional; + +@ConfigProperties(prefix = "greeting") +public interface GreetingConfiguration { + + @ConfigProperty(name = "message") <1> + String message(); + + @ConfigProperty(defaultValue = "!") + String getSuffix(); <2> + + Optional getName(); <3> +} +---- +<1> The `@ConfigProperty` annotation is needed because the name of the configuration property that the method corresponds to doesn't follow the getter method naming conventions. +<2> In this case since `name` was not set, the corresponding property will be `greeting.suffix`. +<3> It is unnecessary to specify the `@ConfigProperty` annotation because the method name follows the getter method naming conventions (`greeting.name` being the corresponding property) and no default value is needed. + +When using `@ConfigProperties` on a class or an interface, if the value of one of its fields is not provided, the application startup will fail and a `javax.enterprise.inject.spi.DeploymentException` indicating the missing value information will be thrown. +This does not apply to `Optional` fields and fields with a default value. + +=== Additional notes on @ConfigProperties + +When using a regular class annotated with `@ConfigProperties` the class doesn't necessarily have to declare getters and setters. +Having simple public non-final fields is valid as well. + +Furthermore, the configuration classes support nested object configuration. Suppose there was a need to have an extra layer +of greeting configuration named `content` that would contain a few fields. 
This could be achieved like so: + +[source,java] +---- +@ConfigProperties(prefix = "greeting") +public class GreetingConfiguration { + + public String message; + public String suffix = "!"; + public Optional name; + public ContentConfig content; <1> + + public static class ContentConfig { + public Integer prizeAmount; + public List recipients; + } +} +---- +<1> The name of the field (not the class name) will determine the name of the properties that are bound to the object. + +Setting the properties would occur in the normal manner, for example in `application.properties` one could have: + +[source,properties] +---- +greeting.message = hello +greeting.name = quarkus +greeting.content.prize-amount=10 +greeting.content.recipients=Jane,John +---- + +Furthermore, classes annotated with `@ConfigProperties` can be annotated with Bean Validation annotations similar to the following example: + +[source,java] +---- +@ConfigProperties(prefix = "greeting") +public class GreetingConfiguration { + + @Size(min = 20) + public String message; + public String suffix = "!"; + +} +---- + +WARNING: For validation to work, the `quarkus-hibernate-validator` extension needs to be present. + +If the validation fails with the given configuration, the application will fail to start and indicate the corresponding validation errors in the log. + +In the case of an interface being annotated with `@ConfigProperties`, the interface is allowed to extend other interfaces and methods from +the entire interface hierarchy are used to bind properties. + +=== Using same ConfigProperties with different prefixes + +Quarkus also supports the use of the same `@ConfigProperties` object with different prefixes for each injection point using the `io.quarkus.arc.config.@ConfigPrefix` annotation. +Say for example that `GreetingConfiguration` from above needs to be used for both the `greeting` prefix and the `other` prefix. +In that case the code would look like so: + +`GreetingConfiguration.java` +[source,java] +---- +@ConfigProperties(prefix = "greeting") +public class GreetingConfiguration { + + @Size(min = 20) + public String message; + public String suffix = "!"; + +} +---- + +`SomeBean.java` +[source,java] +---- +@ApplicationScoped +public class SomeBean { + + @Inject <1> + GreetingConfiguration greetingConfiguration; + + @ConfigPrefix("other") <2> + GreetingConfiguration otherConfiguration; + +} +---- +<1> At this injection point `greetingConfiguration` will use the `greeting` prefix since that is what has been defined on `@ConfigProperties`. +<2> At this injection point `otherConfiguration` will use the `other` prefix from `@ConfigPrefix` instead of the `greeting` prefix. Notice that in this case `@Inject` is not required. + +=== Using a list of objects + +In certain cases it might be necessary to support complex configuration structures that utilize a list of objects as shown in the following example: + +`ComplexConfiguration.java` +[source,java] +---- +@ConfigProperties(prefix = "complex") +public class ComplexConfiguration { + public String name; + public String user; + public String password; + public List inputs; + public List outputs; + + public static class Nested { + public String user; + public String password; + } +} +---- + +Support for such use cases is only available when using YAML configuration with the `quarkus-config-yaml` extension. 
A corresponding example YAML configuration could be: + +`application.yaml` +[source,yaml] +---- +complex: + name: defaultName + user: defaultUser + password: defaultPassword + inputs: + - user: user + password: secret + - user: otheruser + password: secret2 + outputs: + - user: someuser + password: asecret + - user: someotheruser + password: anothersecret +---- + +WARNING: A limitation of such configuration is that the types used as the generic types of the lists need to be classes and not interfaces. + +[[configuration_profiles]] +== Configuration Profiles + +Quarkus supports the notion of configuration profiles. +These allow you to have multiple configurations in the same file and select between them via a profile name. + +The syntax for this is `%{profile}.config.key=value`. For example if I have the following: + +[source,properties] +---- +quarkus.http.port=9090 +%dev.quarkus.http.port=8181 +---- + +The Quarkus HTTP port will be 9090, unless the `dev` profile is active, in which case it will be 8181. + +To use profiles in the `.env` file, you can follow a `_{PROFILE}_CONFIG_KEY=value` pattern. An equivalent of the above example in an `.env` file would be: + +[source,.env] +---- +QUARKUS_HTTP_PORT=9090 +_DEV_QUARKUS_HTTP_PORT=8181 +---- + +By default, Quarkus has three profiles, although it is possible to use as many as you like. +The default profiles are: + +* *dev* - Activated when in development mode (i.e. `quarkus:dev`) +* *test* - Activated when running tests +* *prod* - The default profile when not running in development or test mode + +There are two ways to set a custom profile, either via the `quarkus.profile` system property or the `QUARKUS_PROFILE` +environment variable. If both are set the system property takes precedence. Note that it is not necessary to +define the names of these profiles anywhere, all that is necessary is to create a config property with the profile +name, and then set the current profile to that name. For example if I want a `staging` profile with a different HTTP port I can add the following to `application.properties`: + +[source,properties] +---- +quarkus.http.port=9090 +%staging.quarkus.http.port=9999 +---- + +And then set the `QUARKUS_PROFILE` environment variable to `staging` to activate my profile. + +[NOTE] +==== +The proper way to check the active profile programmatically is to use the `getActiveProfile` method of `io.quarkus.runtime.configuration.ProfileManager`. + +Using `@ConfigProperty("quarkus.profile")` will *not* work properly. +==== + +=== Default Runtime Profile + +The default Quarkus application runtime profile is set to the profile used to build the application. +For example: +[source,bash] +---- +./mvnw package -Pnative -Dquarkus.profile=prod-aws +./target/my-app-1.0-runner // <1> +---- +<1> The command will run with the `prod-aws` profile. This can be overridden using the `quarkus.profile` system property. + + +[[using_property_expressions]] +== Using Property Expressions + +Quarkus supports the use of property expressions in the `application.properties` file. + +These expressions are resolved when the property is read. +So if your configuration property is a build time configuration property, the property expression will be resolved at build time. +If your configuration property is overridable at runtime, the property expression will be resolved at runtime. + +You can use property expressions both for the Quarkus configuration or for your own configuration properties. 
+ +Property expressions are defined this way: `${my-property-expression}`. + +For example, having the following property: + +[source,properties] +---- +remote.host=quarkus.io +---- +and another property defined as: + +[source,properties] +---- +callable.url=https://${remote.host}/ +---- + +will result in the value of the `callable.url` property being set to: + +[source,properties] +---- +callable.url=https://quarkus.io/ +---- + +Another example would be defining different database servers depending on the profile used: + +[source,properties] +---- +%dev.quarkus.datasource.jdbc.url=jdbc:mysql://localhost:3306/mydatabase?useSSL=false +quarkus.datasource.jdbc.url=jdbc:mysql://remotehost:3306/mydatabase?useSSL=false +---- + +can be simplified by having: + +[source,properties] +---- +%dev.application.server=localhost +application.server=remotehost + +quarkus.datasource.jdbc.url=jdbc:mysql://${application.server}:3306/mydatabase?useSSL=false +---- + +It does result in one more line in this example but the value of `application.server` can be reused in other properties, +diminishing the possibility of typos and providing more flexibility in property definitions. + + +[#combine-property-env-var] +== Combining Property Expressions and Environment Variables + +Quarkus also supports the combination of both property expressions and environment variables. + +Let's assume you have the following property defined in `application.properties`: + +[source,properties] +---- +remote.host=quarkus.io +---- + +You can combine environment variables and property expressions by having a property defined as follows: + +[source,properties] +---- +application.host=${HOST:${remote.host}} +---- + +This will expand the `HOST` environment variable and use the value of the property `remote.host` as the default value if `HOST` is not set. + +For the purpose of this section we used the property `remote.host` we defined previously. +It has to be noted that the value could have been a fixed one such as in: + +[source,properties] +---- +application.host=${HOST:localhost} +---- + +which will result in `localhost` as the default value if `HOST` is not set. + +[[configuring_quarkus]] +== Configuring Quarkus + +Quarkus itself is configured via the same mechanism as your application. Quarkus reserves the `quarkus.` namespace +for its own configuration and the configuration of all of its extensions. For example to configure the HTTP server port you can set `quarkus.http.port` in +`application.properties`. All the Quarkus configuration properties are link:all-config[documented and searchable]. + +[IMPORTANT] +==== +As mentioned above, properties prefixed with `quarkus.` are effectively reserved for configuring Quarkus itself and its extensions. +Therefore `quarkus.` should **never** be used as prefix for application specific properties. + +In the previous examples using `quarkus.message` instead of `greeting.message` would result in unexpected behavior. +==== + +Quarkus does much of its configuration and bootstrap at build time and some configuration properties are read and used during the build. +These properties are _fixed at build time_ and it's not possible to change them at runtime. +You always need to repackage your application in order to reflect changes of such properties. + +TIP: The properties fixed at build time are marked with a lock icon (icon:lock[]) in the link:all-config[list of all configuration options]. + +However, some extensions do define properties that can be _overriden at runtime_. 
+A canonical example is the database URL, username and password which are only known specifically in your target environment. +This is a tradeoff as the more runtime properties are available, the less build time prework Quarkus can do. +The list of runtime properties is therefore lean. +You can override these runtime properties with the following mechanisms (in decreasing priority) using: + +1. System properties +2. Environment variables +3. An environment file named `.env` placed in the current working directory +4. A configuration file placed in `$PWD/config/application.properties` + +See <> for more details. + +== Generating configuration for your application + +It is also possible to generate an example `application.properties` with all known configuration properties, to make +it easy to see what Quarkus configuration options are available. To do this, run: + +[source,bash] +-- +./mvnw quarkus:generate-config +-- + +This will create a `src/main/resources/application.properties.example` file that contains all the config options +exposed via the extensions you currently have installed. These options are commented out, and have their default value +when applicable. For example this HTTP port config entry will appear as: + + +[source,properties] +-- +# +# The HTTP port +# +#quarkus.http.port=8080 +-- + +Rather than generating an example config file, you can also add these to you actual config file by setting the `-Dfile` +parameter: + +[source,bash] +-- +./mvnw quarkus:generate-config -Dfile=application.properties +-- + +If a config option is already present (commented or not) it will not be added, so it is safe to run this after +adding an additional extension to see what additional options have been added. + +== Clearing properties + +Run time properties which are optional, and which have had values set at build time or which have a default value, +may be explicitly cleared by assigning an empty string to the property. Note that this will _only_ affect +runtime properties, and will _only_ work with properties whose values are not required. + +The property may be cleared by setting the corresponding `application.properties` property, setting the +corresponding system property, or setting the corresponding environment variable. + +[[custom_configuration]] +== Custom Configuration + +=== Custom configuration sources + +You can also introduce custom configuration sources in the standard MicroProfile Config manner. To +do this, you must provide a class which implements either `org.eclipse.microprofile.config.spi.ConfigSource` +or `org.eclipse.microprofile.config.spi.ConfigSourceProvider`. Create a +https://docs.oracle.com/javase/8/docs/api/java/util/ServiceLoader.html[service file] for the +class and it will be detected and installed at application startup. + +=== Custom configuration converters + +You can also use your custom types for configuration values. This can be done by implementing `org.eclipse.microprofile.config.spi.Converter` +and adding its fully qualified class name in the `META-INF/services/org.eclipse.microprofile.config.spi.Converter` file. + +Let us assume you have a custom type like this one: + +[source,java] +---- +package org.acme.config; + +public class MicroProfileCustomValue { + + private final int number; + + public MicroProfileCustomValue(int number) { + this.number = number; + } + + public int getNumber() { + return number; + } +} +---- + +The corresponding converter will look like the one below. 
Please note that your custom converter class must be `public` and must have
+a `public` no-argument constructor. It also must not be `abstract`.
+
+[source,java]
+----
+package org.acme.config;
+
+import org.eclipse.microprofile.config.spi.Converter;
+
+public class MicroProfileCustomValueConverter implements Converter<MicroProfileCustomValue> {
+
+    @Override
+    public MicroProfileCustomValue convert(String value) {
+        return new MicroProfileCustomValue(Integer.parseInt(value));
+    }
+}
+----
+
+Then you need to include the fully qualified class name of the converter in a service file `META-INF/services/org.eclipse.microprofile.config.spi.Converter`.
+If you have more converters, simply add their class names in this file as well. One fully qualified class name per line, for example:
+
+[source]
+----
+org.acme.config.MicroProfileCustomValueConverter
+org.acme.config.SomeOtherConverter
+org.acme.config.YetAnotherConverter
+----
+
+Please note that `SomeOtherConverter` and `YetAnotherConverter` were added just for demonstration purposes. If you include in this file classes
+which are not available at runtime, loading of the converters will fail.
+
+After this is done, you can use your custom type as a configuration value:
+
+[source,java]
+----
+@ConfigProperty(name = "configuration.value.name")
+MicroProfileCustomValue value;
+----
+
+==== Converter priority
+
+In some cases, you may want to use a custom converter to convert a type which is already converted
+by a different converter. In such cases, you can use the `javax.annotation.Priority` annotation to
+change converter precedence and make your custom converter of higher priority than the other
+in the list.
+
+By default, if no `@Priority` can be found on a converter, it's registered with a priority of 100
+and all Quarkus core converters are registered with a priority of 200, so depending on which
+converter you would like to replace, you need to set a higher value.
+
+To demonstrate the idea, let us implement a custom converter which will take precedence over
+`MicroProfileCustomValueConverter` implemented in the previous example.
+
+[source,java]
+----
+package org.acme.config;
+
+import javax.annotation.Priority;
+import org.eclipse.microprofile.config.spi.Converter;
+
+@Priority(150)
+public class MyCustomConverter implements Converter<MicroProfileCustomValue> {
+
+    @Override
+    public MicroProfileCustomValue convert(String value) {
+
+        final int secretNumber;
+        if (value.startsWith("OBF:")) {
+            secretNumber = Integer.parseInt(SecretDecoder.decode(value));
+        } else {
+            secretNumber = Integer.parseInt(value);
+        }
+
+        return new MicroProfileCustomValue(secretNumber);
+    }
+}
+----
+
+Since it converts the same value type (namely `MicroProfileCustomValue`) and has a priority
+of 150, it will be used instead of a `MicroProfileCustomValueConverter` which has a default
+priority of 100.
+
+NOTE: This new converter also needs to be listed in a service file, i.e. `META-INF/services/org.eclipse.microprofile.config.spi.Converter`.
+
+[[yaml]]
+== YAML for Configuration
+
+=== Add YAML Config Support
+
+You might want to use YAML over properties for configuration.
+Since link:https://github.com/smallrye/smallrye-config[SmallRye Config] brings support for YAML
+configuration, Quarkus supports this as well.
+ +First you will need to add the Config YAML extension to your `pom.xml`: + +[source,xml] +---- + + io.quarkus + quarkus-config-yaml + +---- + +Or you can alternatively run this command in the directory containing your Quarkus project: + +[source,bash] +---- +./mvnw quarkus:add-extension -Dextensions="config-yaml" +---- + +Now Quarkus can read YAML configuration files. +The config directories and priorities are the same as before. + +NOTE: Quarkus will choose an `application.yaml` over an `application.properties`. +YAML files are just an alternative way to configure your application. +You should decide and keep one configuration type to avoid errors. + +==== Configuration Examples +[source,yaml] +---- +# YAML supports comments +quarkus: + datasource: + db-kind: postgresql + jdbc: + url: jdbc:postgresql://localhost:5432/some-database + username: quarkus + password: quarkus + +# REST Client configuration property +org: + acme: + restclient: + CountriesService/mp-rest/url: https://restcountries.eu/rest + +# For configuration property names that use quotes, do not split the string inside the quotes. +quarkus: + log: + category: + "io.quarkus.category": + level: INFO +---- + +[NOTE] +==== +Quarkus also supports using `application.yml` as the name of the YAML file. The same rules apply for this file as for `application.yaml`. +==== + +=== Profile dependent configuration + +Providing profile dependent configuration with YAML is done like with properties. +Just add the `%profile` wrapped in quotation marks before defining the key-value pairs: + +[source,yaml] +---- +"%dev": + quarkus: + datasource: + db-kind: postgresql + jdbc: + url: jdbc:postgresql://localhost:5432/some-database + username: quarkus + password: quarkus +---- + +=== Configuration key conflicts + +The MicroProfile Configuration specification defines configuration keys as an arbitrary `.`-delimited string. +However, structured formats like YAML naively only support a subset of the possible configuration namespace. +For example, consider the two configuration properties `quarkus.http.cors` and `quarkus.http.cors.methods`. +One property is the prefix of another, so it may not be immediately evident how to specify both keys in your YAML configuration. + +This is solved by using a `null` key (normally represented by `~`) for any YAML property which is a prefix of another one. Here's an example: + +.An example YAML configuration resolving prefix-related key name conflicts +[source,yaml] +---- +quarkus: + http: + cors: + ~: true + methods: GET,PUT,POST +---- + +In general, `null` YAML keys are not included in assembly of the configuration property name, allowing them to be used to +any level for disambiguating configuration keys. + +== More info on how to configure + +Quarkus relies on SmallRye Config and inherits its features. + +SmallRye Config provides: + +* Additional Config Sources +* Additional Converters +* Indexed Properties +* Parent Profile +* Interceptors for configuration value resolution +* Relocate Configuration Properties +* Fallback Configuration Properties +* Logging +* Hide Secrets + +For more information, please check the +link:https://smallrye.io/docs/smallrye-config/index.html[SmallRye Config documentation]. 
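+
+For instance, the indexed properties support listed above allows a collection-valued property to be written entry by entry (a minimal sketch; the property name and values are made up, assuming the SmallRye indexed syntax):
+
+[source,properties]
+----
+# An indexed alternative to the comma-separated form greeting.recipients=Jane,John
+greeting.recipients[0]=Jane
+greeting.recipients[1]=John
+----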
diff --git a/docs/src/main/asciidoc/config.adoc b/docs/src/main/asciidoc/config.adoc index b330837b7d24b..4dd23e6e4dd65 100644 --- a/docs/src/main/asciidoc/config.adoc +++ b/docs/src/main/asciidoc/config.adoc @@ -1,12 +1,14 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Configuring Your Application include::./attributes.adoc[] +:toc: + Hardcoded values in your code are a _no go_ (even if we all did it at some point ;-)). In this guide, we learn how to configure your application. @@ -32,7 +34,7 @@ The solution is located in the `config-quickstart` {quickstarts-tree-url}/config First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ @@ -51,19 +53,34 @@ It generates: * an `org.acme.config.GreetingResource` resource * an associated test -== Injecting configuration value +== Create the configuration + +By default, Quarkus reads configuration properties from <>. +For the purpose of this guide, we will use an application configuration file located in `src/main/resources/application.properties`. +Edit the file with the following content: + +[source,properties] +---- +# Your configuration properties +greeting.message = hello +greeting.name = quarkus +---- -Quarkus uses https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config] to inject the configuration in the application. -The injection uses the `@ConfigProperty` annotation. +== Injecting configuration properties + +Quarkus uses https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config] annotations to inject the configuration properties in the application. [source,java] ---- -@ConfigProperty(name = "greeting.message") +@ConfigProperty(name = "greeting.message") <1> String message; ---- +<1> You can use `@Inject @ConfigProperty` or just `@ConfigProperty`. +The `@Inject` annotation is not necessary for members annotated with `@ConfigProperty`. +This behavior differs from https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config]. -NOTE: When injecting a configured value, you can use `@Inject @ConfigProperty` or just `@ConfigProperty`. -The `@Inject` annotation is not necessary for members annotated with `@ConfigProperty`, a behavior which differs from https://microprofile.io/project/eclipse/microprofile-config[MicroProfile Config] +NOTE: If the application attempts to inject a configuration property that is not set, an error is thrown. +So you can quickly know when your configuration is complete. Edit the `org.acme.config.GreetingResource`, and introduce the following configuration properties: @@ -93,19 +110,6 @@ public String hello() { } ---- - -== Create the configuration - -By default, Quarkus reads `application.properties`. -Edit the `src/main/resources/application.properties` with the following content: - -[source,shell] ----- -# Your configuration properties -greeting.message = hello -greeting.name = quarkus ----- - Once set, check the application with: [source,shell] @@ -114,15 +118,14 @@ $ curl http://localhost:8080/greeting hello quarkus! ---- -TIP: If the application requires configuration values and these values are not set, an error is thrown. 
-So you can quickly know when your configuration is complete. +TIP: As an alternative to injecting multiple related configuration values, you can also use the `@io.quarkus.arc.config.ConfigProperties` annotation to group these properties together. +See the <> for more information. == Update the test We also need to update the functional test to reflect the changes made to the endpoint. Edit the `src/test/java/org/acme/config/GreetingResourceTest.java` file and change the content of the `testHelloEndpoint` method to: - [source,java] ---- package org.acme.config; @@ -156,12 +159,12 @@ Open your browser to http://localhost:8080/greeting. Changing the configuration file is immediately reflected. You can add the `greeting.suffix`, remove the other properties, change the values, etc. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. == Programmatically access the configuration -You can access the configuration programmatically. +You can also access the configuration programmatically. It can be handy to achieve dynamic lookup, or retrieve configured values from classes that are neither CDI beans or JAX-RS resources. You can access the configuration programmatically using `org.eclipse.microprofile.config.ConfigProvider.getConfig()` such as in: @@ -172,190 +175,28 @@ String databaseName = ConfigProvider.getConfig().getValue("database.name", Strin Optional maybeDatabaseName = ConfigProvider.getConfig().getOptionalValue("database.name", String.class); ---- -== Using @ConfigProperties - -As an alternative to injecting multiple related configuration values in the way that was shown in the previous example, -users can also use the `@io.quarkus.arc.config.ConfigProperties` annotation to group these properties together. +== Configuration Profiles -For the greeting properties above, a `GreetingConfiguration` class could be created like so: - -[source,java] ----- -package org.acme.config; - -import io.quarkus.arc.config.ConfigProperties; -import java.util.Optional; - -@ConfigProperties(prefix = "greeting") <1> -public class GreetingConfiguration { - - private String message; - private String suffix = "!"; <2> - private Optional name; - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - public String getSuffix() { - return suffix; - } - - public void setSuffix(String suffix) { - this.suffix = suffix; - } - - public Optional getName() { - return name; - } - - public void setName(Optional name) { - this.name = name; - } -} ----- -<1> `prefix` is optional. If not set then the prefix to be used will be determined by the class name. In this case it would still be `greeting` (since the `Configuration` suffix is removed). If the class were named `GreetingExtraConfiguration` then the resulting default prefix would be `greeting-extra`. 
-<2> `!` will be the default value if `greeting.suffix` is not set - -This class could then be injected into the `GreetingResource` using the familiar CDI `@Inject` annotation like so: - -[source,java] ----- -@Inject -GreetingConfiguration greetingConfiguration; ----- - -Another alternative style provided by Quarkus is to create `GreetingConfiguration` as an interface like so: - -[source,java] ----- -package org.acme.config; - -import io.quarkus.arc.config.ConfigProperties; -import org.eclipse.microprofile.config.inject.ConfigProperty; -import java.util.Optional; - -@ConfigProperties(prefix = "greeting") -public interface GreetingConfiguration { - - @ConfigProperty(name = "message") <1> - String message(); - - @ConfigProperty(defaultValue = "!") - String getSuffix(); <2> - - Optional getName(); <3> -} ----- -<1> The `@ConfigProperty` annotation is needed because the name of the configuration property that the method corresponds to doesn't follow the getter method naming conventions -<2> In this case since `name` was not set, the corresponding property will be `greeting.suffix`. -<3> It is unnecessary to specify the `@ConfigProperty` annotation because the method name follows the getter method naming conventions (`greeting.name` being the corresponding property) and no default value is needed. - -When using `@ConfigProperties` on a class or an interface, if the value of one of its fields is not provided, the application startup will fail and a `javax.enterprise.inject.spi.DeploymentException` indicating the missing value information will be thrown. -This does not apply to `Optional` fields and fields with a default value. - -=== Additional notes on @ConfigProperties - -When using a regular class annotated with `@ConfigProperties` the class doesn't necessarily have to declare getters and setters. -Having simple public non-final fields is valid as well. - -Furthermore, the configuration classes support nested object configuration. Suppose there was a need to have an extra layer -of greeting configuration named `hidden` that would contain a few fields. This could be achieved like so: - -[source,java] ----- -@ConfigProperties(prefix = "greeting") -public class GreetingConfiguration { - - public String message; - public String suffix = "!"; - public Optional name; - public HiddenConfig hidden; <1> - - public static class HiddenConfig { - public Integer prizeAmount; - public List recipients; - } -} ----- -<1> The name of the field (not the class name) will determine the name of the properties that are bound to the object. +Quarkus supports the notion of configuration profiles. +These allow you to have multiple configuration values in the same file and select between them via a profile name. -Setting the properties would occur in the normal manner, for example in `application.properties` one could have: +The syntax for this is `%{profile}.config.key=value`. For example if I have the following: [source,properties] ---- -greeting.message = hello -greeting.name = quarkus -greeting.hidden.prize-amount=10 -greeting.hidden.recipients=Jane,John ----- - -Furthermore, classes annotated with `@ConfigProperties` can be annotated with Bean Validation annotations similar to the following example: - -[source,java] ----- -@ConfigProperties(prefix = "greeting") -public class GreetingConfiguration { - - @Size(min = 20) - public String message; - public String suffix = "!"; - -} ----- - -WARNING: For validation to work, the `quarkus-hibernate-validator` extension needs to be present. 
- -If the validation fails with the given configuration, the application will fail to start and indicate the corresponding validation errors in the log. - -In the case of an interface being annotated with `@ConfigProperties`, the interface is allowed to extend other interfaces and methods from -the entire interface hierarchy are used to bind properties. - -=== Using same ConfigProperties with different prefixes - -Quarkus also supports the use of the same `@ConfigProperties` object with different prefixes for each injection point using the `io.quarkus.arc.config.@ConfigPrefix` annotation. -Say for example that `GreetingConfiguration` from above needs to be used for both the `greeting` prefix and the `other` prefix. -In that case the code would look like so: - -`GreetingConfiguration.java` -[source,java] ----- -@ConfigProperties(prefix = "greeting") -public class GreetingConfiguration { - - @Size(min = 20) - public String message; - public String suffix = "!"; - -} ----- - -`SomeBean.java` -[source,java] +quarkus.http.port=9090 +%dev.quarkus.http.port=8181 ---- -@ApplicationScoped -public class SomeBean { - @Inject <1> - GreetingConfiguration greetingConfiguration; +Then the Quarkus HTTP port will be 9090, unless the `dev` profile is active, in which case it will be 8181. - @ConfigPrefix("other") <2> - GreetingConfiguration otherConfiguration; - -} ----- -<1> At this injection point `greetingConfiguration` will use the `greeting` prefix since that is what has been defined on `@ConfigProperties`. -<2> At this injection point `otherConfiguration` will use the `other` prefix from `@ConfigPrefix` instead of the `greeting` prefix. Notice that in this case `@Inject` is not required. +See the <> for more information about configuration profiles. == Configuring Quarkus Quarkus itself is configured via the same mechanism as your application. Quarkus reserves the `quarkus.` namespace for its own configuration. For example to configure the HTTP server port you can set `quarkus.http.port` in -`application.properties`. +`application.properties`. All the Quarkus configuration properties are link:all-config[documented and searchable]. [IMPORTANT] ==== @@ -365,457 +206,18 @@ therefore `quarkus.` should **never** be used as prefix for application specific In the previous examples using `quarkus.message` instead of `greeting.message` would result in unexpected behavior. ==== -=== List of all configuration properties - -All the Quarkus configuration properties are link:all-config[documented and searcheable]. - -=== Generating configuration for your application - -It is also possible to generate an example `application.properties` with all known configuration properties, to make -it easy to see what Quarkus configuration options are available. To do this, run: - -[source,shell] --- -./mvnw quarkus:generate-config --- +Quarkus does much of its configuration and bootstrap at build time and some configuration properties are read and used during the build. +These properties are _fixed at build time_ and it's not possible to change them at runtime. +You always need to repackage your application in order to reflect changes of such properties. -This will create a `src/main/resources/application.properties.example` file that contains all the config options -exposed via the extensions you currently have installed. These options are commented out, and have their default value -when applicable. 
For example this HTTP port config entry will appear as: +TIP: The properties fixed at build time are marked with a lock icon (icon:lock[]) in the link:all-config[list of all configuration options]. - -[source,properties] --- -# -# The HTTP port -# -#quarkus.http.port=8080 --- - -Rather than generating an example config file, you can also add these to you actual config file by setting the `-Dfile` -parameter: - -[source,shell] --- -./mvnw quarkus:generate-config -Dfile=application.properties --- - -If a config option is already present (commented or not) it will not be added, so it is safe to run this after -adding an additional extension to see what additional options have been added. - -== Overriding properties at runtime - -Quarkus does much of its configuration and bootstrap at build time. -Most properties will then be read and set during the build time step. -To change them, make sure to repackage your application. - -[source,shell] --- -./mvnw clean package --- - -Extensions do define _some_ properties as overridable at runtime. +However, some extensions do define properties _overridable at runtime_. A canonical example is the database URL, username and password which is only known specifically in your target environment. -This is a tradeoff as the more runtime properties are available, the less build time prework Quarkus can do. The list of runtime properties is therefore lean. - -You can override these runtime properties with the following mechanisms (in decreasing priority): - -1. using system properties: - * for a runner jar: `java -Dquarkus.datasource.password=youshallnotpass -jar target/myapp-runner.jar` - * for a native executable: `./target/myapp-runner -Dquarkus.datasource.password=youshallnotpass` -2. using environment variables: - * for a runner jar: `export QUARKUS_DATASOURCE_PASSWORD=youshallnotpass ; java -jar target/myapp-runner.jar` - * for a native executable: `export QUARKUS_DATASOURCE_PASSWORD=youshallnotpass ; ./target/myapp-runner` -3. using an environment file named `.env` placed in the current working directory containing the line `QUARKUS_DATASOURCE_PASSWORD=youshallnotpass` (for dev mode, this file can be placed in the root of the project, but it is advised to not check it in to version control) -4. using a configuration file placed in `$PWD/config/application.properties` - * By placing an `application.properties` file inside a directory named `config` which resides in the directory where the application runs, any runtime properties defined -in that file will override the default configuration. Furthermore any runtime properties added to this file that were not part of the original `application.properties` file -_will also_ be taken into account. - * This works in the same way for runner jar and the native executable - -NOTE: Environment variables names are following the conversion rules of link:https://github.com/eclipse/microprofile-config/blob/master/spec/src/main/asciidoc/configsources.asciidoc#default-configsources[Eclipse MicroProfile] - -NOTE: The `config/application.properties` features is available in development mode as well. To make use of it, `config/application.properties` needs to be placed inside the build tool's output directory (`target` for Maven and `build/classes/java/main` for Gradle). -Keep in mind however that any cleaning operation from the build tool like `mvn clean` or `gradle clean` will remove the `config` directory as well. - -=== Configuration Profiles - -Quarkus supports the notion of configuration profiles. 
These allow you to have multiple configuration in the same file and -select between them via a profile name. - -The syntax for this is `%{profile}.config.key=value`. For example if I have the following: - -[source,properties] ----- -quarkus.http.port=9090 -%dev.quarkus.http.port=8181 ----- - -The Quarkus HTTP port will be 9090, unless the `dev` profile is active, in which case it will be 8181. - -By default Quarkus has three profiles, although it is possible to use as many as you like. The default profiles are: - -* *dev* - Activated when in development mode (i.e. `quarkus:dev`) -* *test* - Activated when running tests -* *prod* - The default profile when not running in development or test mode - -There are two ways to set a custom profile, either via the `quarkus.profile` system property or the `QUARKUS_PROFILE` -environment variable. If both are set the system property takes precedence. Note that it is not necessary to -define the names of these profiles anywhere, all that is necessary is to create a config property with the profile -name, and then set the current profile to that name. For example if I want a `staging` profile with a different HTTP port -I can add the following to `application.properties`: - -[source,properties] ----- -quarkus.http.port=9090 -%staging.quarkus.http.port=9999 ----- - -And then set the `QUARKUS_PROFILE` environment variable to `staging` to activate my profile. - -[NOTE] -==== -The proper way to check the active profile programmatically is to use the `getActiveProfile` method of `io.quarkus.runtime.configuration.ProfileManager`. - -Using `@ConfigProperty("quarkus.profile")` will *not* work properly. -==== - -=== Using Property Expressions - -Quarkus supports the use of property expressions in the `application.properties` file. - -These expressions are resolved when the property is read. -So if your configuration property is a build time configuration property, the property expression will be resolved at build time. -If your configuration property is overridable at runtime, the property expression will be resolved at runtime. - -You can use property expressions both for the Quarkus configuration or for your own configuration properties. - -Property expressions are defined this way: `${my-property-expression}`. - -For example, having the following property: - -[source,properties] ----- -remote.host=quarkus.io ----- -and another property defined as: - -[source,properties] ----- -callable.url=https://${remote.host}/ ----- - -will result in the value of the `callable.url` property being set to: - -[source,properties] ----- -callable.url=https://quarkus.io/ ----- - -Another example would be defining different database servers depending on the profile used: - -[source,properties] ----- -%dev.quarkus.datasource.url=jdbc:mysql://localhost:3306/mydatabase?useSSL=false -quarkus.datasource.url=jdbc:mysql://remotehost:3306/mydatabase?useSSL=false ----- - -can be simplified by having: - -[source,properties] ----- -%dev.application.server=localhost -application.server=remotehost - -quarkus.datasource.url=jdbc:mysql://${application.server}:3306/mydatabase?useSSL=false ----- - -It does result in one more line in this example but the value of `application.server` can be reused in other properties, -diminishing the possibility of typos and providing more flexibility in property definitions. - -=== Combining Property Expressions and Environment Variables - -Quarkus also supports the combination of both property expressions and environment variables. 
- -Let's assume you have following property defined in `application.properties`: - -[source,properties] ----- -remote.host=quarkus.io ----- - -You can combine environment variables and property expressions by having a property defined as follows: - -[source,properties] ----- -application.host=${HOST:${remote.host}} ----- - -This will expand the `HOST` environment variable and use the value of the property `remote.host` as the default value if `HOST` is not set. - -For the purpose of this section we used the property `remote.host` we defined previously. -It has to be noted that the value could have been a fixed one such as in: - -[source,properties] ----- -application.host=${HOST:localhost} ----- - -which will result in `localhost` as the default value if `HOST` is not set. - -=== Clearing properties - -Run time properties which are optional, and which have had values set at build time or which have a default value, -may be explicitly cleared by assigning an empty string to the property. Note that this will _only_ affect -run time properties, and will _only_ work with properties whose values are not required. - -The property may be cleared by setting the corresponding `application.properties` property, setting the -corresponding system property, or setting the corresponding environment variable. - -==== Miscellaneous -The default Quarkus application runtime profile is set to the profile used to build the application. -For example: -[source,shell] ----- -./mvnw package -Pnative -Dquarkus.profile=prod-aws -./target/my-app-1.0-runner // <1> ----- -<1> The command will run with the `prod-aws` profile. This can be overridden using the `quarkus.profile` system property. - -== Custom Configuration - -=== Custom configuration sources - -You can also introduce custom configuration sources in the standard MicroProfile Config manner. To -do this, you must provide a class which implements either `org.eclipse.microprofile.config.spi.ConfigSource` -or `org.eclipse.microprofile.config.spi.ConfigSourceProvider`. Create a -https://docs.oracle.com/javase/8/docs/api/java/util/ServiceLoader.html[service file] for the -class and it will be detected and installed at application startup. - -=== Custom configuration converters - -You can also use your custom types as a configuration values. This can be done by implementing `org.eclipse.microprofile.config.spi.Converter` -and adding its fully qualified class name in the `META-INF/services/org.eclipse.microprofile.config.spi.Converter` file. - -Let us assume you have a custom type like this one: - -[source,java] ----- -package org.acme.config; - -public class MicroProfileCustomValue { - - private final int number; - - public MicroProfileCustomValue(int number) { - this.number = number; - }; - - public int getNumber() { - return number; - } -} ----- - -The corresponding converter will look like the one below. Please note that your custom converter class must be `public` and must have -a `public` no-argument constructor. It also must not be `abstract`. - - -[source,java] ----- -package org.acme.config; - -import org.eclipse.microprofile.config.spi.Converter; - -public class MicroProfileCustomValueConverter implements Converter { - - @Override - public MicroProfileCustomValue convert(String value) { - return new MicroProfileCustomValue(Integer.valueOf(value)); - } -} ----- - -Then you need to include the fully qualified class name of the converter in a service file `META-INF/services/org.eclipse.microprofile.config.spi.Converter`. 
-If you have more converters, simply add their class names in this file as well. Single fully qualified class name per line, for example: - -[source] ----- -org.acme.config.MicroProfileCustomValueConverter -org.acme.config.SomeOtherConverter -org.acme.config.YetAnotherConverter ----- - -Please note that `SomeOtherConverter` and `YetAnotherConverter` were added just for a demonstration. If you include in this file classes -which are not available at runtime, the converters loading will fail. - -After this is done you can use your custom type as a configuration value: - -[source,java] ----- -@ConfigProperty(name = "configuration.value.name") -MicroProfileCustomValue value; ----- - -==== Converter priority - -In some cases, you may want to use a custom converter to convert a type which is already converted -by a different converter. In such cases, you can use the `javax.annotation.Priority` annotation to -change converters precedence and make your custom converter of higher priority than the other -on the list. - -By default, if no `@Priority` can be found on a converter, it's registered with a priority of 100 -and all Quarkus core converters are registered with a priority of 200, so depending on which -converter you would like to replace, you need to set a higher value. - -To demonstrate the idea let us implement a custom converter which will take precedence over -`MicroProfileCustomValueConverter` implemented in the previous example. - -[source,java] ----- -package org.acme.config; - -import javax.annotation.Priority; -import org.eclipse.microprofile.config.spi.Converter; - -@Priority(150) -public class MyCustomConverter implements Converter { - - @Override - public MicroProfileCustomValue convert(String value) { - - final int secretNumber; - if (value.startsFrom("OBF:")) { - secretNumber = Integer.valueOf(SecretDecoder.decode(value)); - } else { - secretNumber = Integer.valueOf(value); - } - - return new MicroProfileCustomValue(secretNumber); - } -} ----- - -Since it converts the same value type (namely `MicroProfileCustomValue`) and has a priority -of 150, it will be used instead of a `MicroProfileCustomValueConverter` which has a default -priority of 100. - -NOTE: This new converter also needs to be listed in a service file, i.e. `META-INF/services/org.eclipse.microprofile.config.spi.Converter`. - -[[yaml]] -== YAML for Configuration - -=== Add YAML Config Support - -You might want to use YAML over properties for configuration. -Since link:https://github.com/smallrye/smallrye-config[SmallRye Config] brings support for YAML -configuration, Quarkus supports this as well. - -First you will need to add the YAML extension to your `pom.xml`: - -[source,xml] ----- - - io.quarkus - quarkus-config-yaml - ----- - -Or you can alternatively run this command in the directory containing your Quarkus project: - -[source,bash] ----- -./mvnw quarkus:add-extension -Dextensions="config-yaml" ----- - -Now Quarkus can read YAML configuration files. -The config directories and priorities are the same as before. - -NOTE: Quarkus will choose an `application.yaml` over an `application.properties`. -YAML files are just an alternative way to configure your application. -You should decide and keep one configuration type to avoid errors. 
- -==== Configuration Examples -[source,yaml] ----- -# YAML supports comments -quarkus: - datasource: - url: jdbc:postgresql://localhost:5432/some-database - driver: org.postgresql.Driver - username: quarkus - password: quarkus - -# REST Client configuration property -org: - acme: - restclient: - CountriesService/mp-rest/url: https://restcountries.eu/rest - -# For configuration property names that use quotes, do not split the string inside the quotes. -quarkus: - log: - category: - "io.quarkus.category": - level: INFO ----- - -[NOTE] -==== -Quarkus also supports using `application.yml` as the name of the YAML file. The same rules apply for this file as for `application.yaml`. -==== - -=== Profile dependent configuration - -Providing profile dependent configuration with YAML is done like with properties. -Just add the `%profile` wrapped in quotation marks before defining the key-value pairs: - -[source,yaml] ----- -"%dev": - quarkus: - datasource: - url: jdbc:postgresql://localhost:5432/some-database - driver: org.postgresql.Driver - username: quarkus - password: quarkus ----- - -=== Configuration key conflicts - -The MicroProfile Configuration specification defines configuration keys as an arbitrary `.`-delimited string. -However, structured formats like YAML naively only support a subset of the possible configuration namespace. -For example, consider the two configuration properties `quarkus.http.cors` and `quarkus.http.cors.methods`. -One property is the prefix of another, so it may not be immediately evident how to specify both keys in your YAML configuration. - -This is solved by using a null key (normally represented by `~`) for any YAML property which is a prefix of another one. Here's an example: - -.An example YAML configuration resolving prefix-related key name conflicts -[source,yaml] ----- -quarkus: - http: - cors: - ~: true - methods: GET,PUT,POST ----- - -In general, null YAML keys are not included in assembly of the configuration property name, allowing them to be used to -any level for disambiguating configuration keys. - -== More info on how to configure - -Quarkus relies on SmallRye Config and inherits its features. - -SmallRye Config provides: -* Additional Config Sources -* Additional Converters -* Interceptors for configuration value resolution -* Relocate Configuration Properties -* Fallback Configuration Properties -* Logging -* Hide Secrets +1. System properties +2. Environment variables +3. An environment file named `.env` placed in the current working directory +4. A configuration file placed in `$PWD/config/application.properties` -For more information, please check the -link:https://smallrye.io/docs/smallrye-config/index.html[SmallRye Config documentation]. +See the <> for more information. 
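For instance, a runtime property such as `quarkus.datasource.password` can be overridden at startup without repackaging the application. A minimal sketch, assuming a runner jar named `myapp-runner.jar` (the property name, value and jar name are placeholders):

[source,bash]
----
# Override with a system property
java -Dquarkus.datasource.password=youshallnotpass -jar target/myapp-runner.jar

# Or with an environment variable
export QUARKUS_DATASOURCE_PASSWORD=youshallnotpass
java -jar target/myapp-runner.jar
----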
diff --git a/docs/src/main/asciidoc/consul-config.adoc b/docs/src/main/asciidoc/consul-config.adoc index 09525166fd1fd..64652eee8dfd0 100644 --- a/docs/src/main/asciidoc/consul-config.adoc +++ b/docs/src/main/asciidoc/consul-config.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Reading properties from Consul @@ -35,7 +35,7 @@ This Key-Value store is what the `quarkus-consul-config` extension interacts wit There are various ways to start Consul that vary in complexity, but for the purposes of this guide, we elect to start a single Consul server with no persistence via Docker, like so: -[source] +[source,bash] ---- docker run --rm --name consul -p 8500:8500 -p 8501:8501 consul:1.7 agent -dev -ui -client=0.0.0.0 -bind=0.0.0.0 --https-port=8501 ---- @@ -46,7 +46,7 @@ Please consult the https://www.consul.io/docs/install[documentation] to learn mo First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ diff --git a/docs/src/main/asciidoc/container-image.adoc b/docs/src/main/asciidoc/container-image.adoc index a4a0da120f0c4..f3b7e3ae4c2db 100644 --- a/docs/src/main/asciidoc/container-image.adoc +++ b/docs/src/main/asciidoc/container-image.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Container Images @@ -25,7 +25,7 @@ when all that is needed is the ability to push to a container image registry. To use this feature, add the following extension to your project: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="container-image-jib" ---- @@ -48,7 +48,7 @@ The extension `quarkus-container-image-docker` is using the Docker binary and th To use this feature, add the following extension to your project. -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="container-image-docker" ---- @@ -75,7 +75,7 @@ The creation of such objects is being taken care of by the Quarkus Kubernetes ex To build a container image for your project, `quarkus.container-image.build=true` needs to be set using any of the ways that Quarkus supports. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -Dquarkus.container-image.build=true ---- @@ -84,7 +84,7 @@ To build a container image for your project, `quarkus.container-image.build=true To push a container image for your project, `quarkus.container-image.push=true` needs to be set using any of the ways that Quarkus supports. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -Dquarkus.container-image.push=true ---- @@ -97,7 +97,7 @@ It does not make sense to use multiple extension as part of the same build. 
When For example, if both `container-image-docker` and `container-image-s2i` are present and the user needs to use `container-image-docker`: -[source] +[source,properties] ---- quarkus.container-image.builder=docker ---- @@ -110,6 +110,22 @@ The following properties can be used to customize the container image build proc include::{generated-dir}/config/quarkus-container-image.adoc[opts=optional, leveloffset=+1] +==== Using CI Environments + +Various CI environments provide a ready to use container-image registry which can be combined with the container-image Quarkus extensions in order to +effortlessly create and push a Quarkus application to said registry. + +For example, https://gitlab.com/[GitLab] provides such a registry and in the provided CI environment, +makes available the `CI_REGISTRY_IMAGE` environment variable +(see GitLab's https://docs.gitlab.com/ee/ci/variables/[documentation]) for more information), which can be used in Quarkus like so: + +[source,properties] +---- +quarkus.container-image.image=${CI_REGISTRY_IMAGE} +---- + +NOTE: See link:config.adoc#combine-property-env-var[this] for more information on how to combine properties with environment variables. + === Jib Options In addition to the generic container image options, the `container-image-jib` also provides the following options: diff --git a/docs/src/main/asciidoc/context-propagation.adoc b/docs/src/main/asciidoc/context-propagation.adoc index 12c976ea5a5e9..147ca885f8545 100644 --- a/docs/src/main/asciidoc/context-propagation.adoc +++ b/docs/src/main/asciidoc/context-propagation.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Context Propagation in Quarkus @@ -101,7 +101,7 @@ you need manual context propagation. You can do that by injecting a `ThreadConte or `ManagedExecutor` that will propagate every context. For example, here we use the link:vertx[Vert.x Web Client] to get the list of Star Wars people, then store them in the database using link:hibernate-orm-panache[Hibernate ORM with Panache] (all in the same transaction) before returning -them to the client as JSON using link:rest-json[JSON-B or Jackson]: +them to the client as JSON using link:rest-json[Jackson or JSON-B]: [source,java] ---- @@ -112,12 +112,11 @@ them to the client as JSON using link:rest-json[JSON-B or Jackson]: @Transactional @GET @Path("/people") - @Produces(MediaType.APPLICATION_JSON) public CompletionStage> people() throws SystemException { // Create a REST client to the Star Wars API WebClient client = WebClient.create(vertx, new WebClientOptions() - .setDefaultHost("swapi.co") + .setDefaultHost("swapi.dev") .setDefaultPort(443) .setSsl(true)); // get the list of Star Wars people, with context capture @@ -173,3 +172,21 @@ You need to include the following modules to get RxJava2 support:
    ---- + +== Context Propagation for CDI + +In terms of CDI, `@RequestScoped`, `@ApplicationScoped` and `@Singleton` beans get propagated and are available in other threads. +`@Dependent` beans as well as any custom scoped beans cannot be automatically propagated via CDI Context Propagation. + + +`@ApplicationScoped` and `@Singleton` beans are always active scopes and as such are easy to deal with - context propagation tasks can work with those beans so long as the CDI container is running. +However, `@RequestScoped` beans are a different story. They are only active for a short period of time which can be bound either to HTTP request or some other request/task when manually activated/deactivated. +In this case user must be aware that once the original thread gets to an end of a request, it will terminate the context, calling `@PreDestroy` on those beans and then clearing them from the context. +Subsequent attempts to access those beans from other threads can result in unexpected behaviour. +It is therefore recommended to make sure all tasks using request scoped beans via context propagation are performed in such a manner that they don't outlive the original request duration. + + +[NOTE] +==== +Due to the above described behavior, it is recommended to avoid using `@PreDestroy` on `@RequestScoped` beans when working with Context Propagation in CDI. +==== diff --git a/docs/src/main/asciidoc/credentials-provider.adoc b/docs/src/main/asciidoc/credentials-provider.adoc index f37587ac2b306..d9ba21f0b9abf 100644 --- a/docs/src/main/asciidoc/credentials-provider.adoc +++ b/docs/src/main/asciidoc/credentials-provider.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using a Credentials Provider diff --git a/docs/src/main/asciidoc/datasource.adoc b/docs/src/main/asciidoc/datasource.adoc index 51c536ec7934b..f011b9794b121 100644 --- a/docs/src/main/asciidoc/datasource.adoc +++ b/docs/src/main/asciidoc/datasource.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Datasources @@ -22,7 +22,7 @@ Both are configured via unified and flexible configuration. [NOTE] ==== Agroal is a modern, light weight connection pool implementation designed for very high performance and scalability, -and features first class integration with the other components in Quarkus, such as security, transaction management components, health metrics. +and features first class integration with the other components in Quarkus, such as security, transaction management components, health, and metrics. ==== This guide will explain how to: @@ -40,6 +40,39 @@ please refer to the link:reactive-sql-clients[Reactive SQL clients guide]. This is a quick introduction to datasource configuration. If you want a better understanding of how all this works, this guide has a lot more information in the subsequent paragraphs. +=== Zero Config Setup (DevServices) + +When testing or running in dev mode Quarkus can even provide you with a zero config database out of the box, a feature +we refer to as DevServices. 
Depending on your database type you may need docker installed in order to use this feature. DevServices +is supported for the following open source databases: + +* Postgresql (container) +* MySQL (container) +* MariaDB (container) +* H2 (in-process) +* Apache Derby (in-process) +* DB2 (container) (requires license acceptance) +* MSSQL (container) (requires license acceptance) + + +If you want to use DevServices then all you need to do is include the relevant extension for the type of database you want (either reactive or +JDBC, or both), and don't configure a database URL, username and password, Quarkus will provide the database and you can just start +coding without worrying about config. + +If you are using a proprietary database such as DB2 or MSSQL you will need to accept the license agreement. To do this +create a `src/main/resources/container-license-acceptance.txt` files in your project and add a line with the image +name and tag of the database. By default Quarkus uses the default image for the current version of Testcontainers, if +you attempt to start Quarkus the resulting failure will tell you the exact image name in use for you to add to the +file. + +An example file is shown below: + +.src/main/resources/container-license-acceptance.txt +---- +ibmcom/db2:11.5.0.0a +mcr.microsoft.com/mssql/server:2017-CU12 +---- + === JDBC datasource Add the `agroal` extension plus one of `jdbc-db2`, `jdbc-derby`, `jdbc-h2`, `jdbc-mariadb`, `jdbc-mssql`, `jdbc-mysql`, or `jdbc-postgresql`. @@ -48,14 +81,16 @@ Then configure your datasource: [source, properties] ---- -quarkus.datasource.db-kind=postgresql +quarkus.datasource.db-kind=postgresql <1> quarkus.datasource.username= quarkus.datasource.password= quarkus.datasource.jdbc.url=jdbc:postgresql://localhost:5432/hibernate_orm_test -quarkus.datasource.jdbc.min-size=4 quarkus.datasource.jdbc.max-size=16 ---- +<1> If you only have a single JDBC extension, or you are running tests and only have a single test scoped JDBC extension installed then this is +optional. If there is only one possible extension we assume this is the correct one, and if a driver has been added with test scope then +we assume that this should be used in testing. === Reactive datasource @@ -65,20 +100,22 @@ Then configure your reactive datasource: [source, properties] ---- -quarkus.datasource.db-kind=postgresql +quarkus.datasource.db-kind=postgresql <1> quarkus.datasource.username= quarkus.datasource.password= quarkus.datasource.reactive.url=postgresql:///your_database quarkus.datasource.reactive.max-size=20 ---- +<1> As specified above this is optional. == Default datasource A datasource can be either a JDBC datasource, a reactive one or both. It all depends on how you configure it and which extensions you added to your project. -To define a datasource, start with the following: +To define a datasource, start with the following (note that this is only required if you have more than one +database type installed): [source, properties] ---- @@ -136,7 +173,7 @@ First, you will need to add the `quarkus-agroal` dependency to your project. You can add it using a simple Maven command: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="agroal" ---- @@ -164,7 +201,7 @@ See <> if you want to use a JDBC driver for another database. [NOTE] ==== The H2 and Derby databases can normally be configured to run in "embedded mode"; -the extension does not support compiling the embedded database engine into native images. 
+the extension does not support compiling the embedded database engine into native executables. Read <> (below) for suggestions regarding integration testing. ==== @@ -173,7 +210,7 @@ As usual, you can install the extension using `add-extension`. To install the PostgreSQL driver dependency for instance, run the following command: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="jdbc-postgresql" ---- @@ -347,10 +384,8 @@ So it is not currently possible to create multiple reactive datasources. [NOTE] ==== -Currently, Hibernate ORM supports only one persistence unit which uses the default datasource. -If you want to use multiple datasources, you will need to manipulate them directly. - -Multiple persistence units support is planned for a future version of Quarkus. +The Hibernate ORM extension link:hibernate-orm#multiple-persistence-units[supports defining several persistence units using configuration properties]. +For each persistence unit, you can point to the datasource of your choice. ==== Defining multiple datasources works exactly the same way as defining a single datasource, with one important change: @@ -369,25 +404,26 @@ each with its own configuration. quarkus.datasource.db-kind=h2 quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 quarkus.datasource.jdbc.max-size=13 quarkus.datasource.users.db-kind=h2 quarkus.datasource.users.username=username1 quarkus.datasource.users.jdbc.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.jdbc.min-size=1 quarkus.datasource.users.jdbc.max-size=11 quarkus.datasource.inventory.db-kind=h2 quarkus.datasource.inventory.username=username2 quarkus.datasource.inventory.jdbc.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.jdbc.min-size=2 quarkus.datasource.inventory.jdbc.max-size=12 ---- Notice there is an extra bit in the key. The syntax is as follows: `quarkus.datasource.[optional name.][datasource property]`. +NOTE: Even when only one database extension is installed, named databases need to specify at least one build time +property so that Quarkus knows they exist. Generally this will be the `db-kind` property, although you can also +specify DevServices properties to create named datasources (covered later in this guide). + === Named Datasource Injection When using multiple datasources, each `DataSource` also has the `io.quarkus.agroal.DataSource` qualifier with the name of the datasource as the value. @@ -412,25 +448,25 @@ AgroalDataSource inventoryDataSource; If you are using the `quarkus-smallrye-health` extension, the `quarkus-agroal` and reactive client extensions will automatically add a readiness health check to validate the datasource. -So when you access the `/health/ready` endpoint of your application you will have information about the datasource validation status. +When you access the `/q/health/ready` endpoint of your application you will have information about the datasource validation status. If you have multiple datasources, all datasources will be checked and the status will be `DOWN` as soon as there is one datasource validation failure. This behavior can be disabled via the property `quarkus.datasource.health.enabled`. == Datasource Metrics -If you are using the `quarkus-smallrye-metrics` extension, `quarkus-agroal` can expose some data source metrics on the -`/metrics` endpoint. This can be turned on by setting the property `quarkus.datasource.metrics.enabled` to true. 
+If you are using the `quarkus-micrometer` or `quarkus-smallrye-metrics` extension, `quarkus-agroal` can expose some data source metrics on the +`/q/metrics` endpoint. This can be turned on by setting the property `quarkus.datasource.metrics.enabled` to true. For the exposed metrics to contain any actual values, it is necessary that metric collection is enabled internally -by Agroal mechanisms. By default, this metric collection mechanism gets turned on for all data sources if the `quarkus-smallrye-metrics` +by Agroal mechanisms. By default, this metric collection mechanism gets turned on for all data sources if a metrics extension is present and metrics for the Agroal extension are enabled. If you want to disable metrics for a particular data source, this can be done by setting `quarkus.datasource.jdbc.enable-metrics` to `false` (or `quarkus.datasource..jdbc.enable-metrics` for a named datasource). -This disables collecting the metrics as well as exposing them in the `/metrics` endpoint, +This disables collecting the metrics as well as exposing them in the `/q/metrics` endpoint, because it does not make sense to expose metrics if the mechanism to collect them is disabled. Conversely, setting `quarkus.datasource.jdbc.enable-metrics` to `true` (or `quarkus.datasource..jdbc.enable-metrics` for a named datasource) explicitly can be used to enable collection of metrics even if -the `quarkus-smallrye-metrics` extension is not in use. +a metrics extension is not in use. This can be useful if you need to access the collected metrics programmatically. They are available after calling `dataSource.getMetrics()` on an injected `AgroalDataSource` instance. If collection of metrics is disabled for this data source, all values will be zero. @@ -441,6 +477,28 @@ If the Narayana JTA extension is also available, integration is automatic. You can override this by setting the `transactions` configuration property - see the <> below. +== DevServices (Configuration Free Databases) + +As mentioned above Quarkus supports a feature called DevServices that allows you to create datasources without any config. If +you have a database extension that supports it and no config is provided, Quarkus will automatically start a database (either +using Testcontainers, or by starting a Java DB in process), and automatically configure a connection to this database. + +Production databases need to be configured as normal, so if you want to include a production database config in your +application.properties and continue to use DevServices we recommend that you use the `%prod.` profile to define your +database settings. + +=== Configuring DevServices + +DevServices supports the following config options: + +include::{generated-dir}/config/quarkus-datasource-config-group-dev-services-build-time-config.adoc[opts=optional, leveloffset=+1] + +=== Named Datasources + +When using DevServices the default datasource will always be created, but to specify a named datasource you need to have +at least one build time property so Quarkus knows to create the datasource. In general you will usually either specify +the `db-kind` property, or explicitly enable DevDb: `quarkus.datasource."named".devservices=true`. + [[in-memory-databases]] == Testing with in-memory databases @@ -451,11 +509,11 @@ good reasons to also want the ability to run quick integration tests using the J so this is possible as well. 
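As a companion to the DevServices recommendation earlier in this guide, here is a minimal sketch of an `application.properties` that leaves dev mode and tests to DevServices and only configures a real database for the `prod` profile (all values below are placeholders):

[source,properties]
----
# No URL, username or password outside the prod profile,
# so dev mode and tests fall back to DevServices
quarkus.datasource.db-kind=postgresql

# Only used when the application runs with the prod profile
%prod.quarkus.datasource.username=quarkus
%prod.quarkus.datasource.password=quarkus
%prod.quarkus.datasource.jdbc.url=jdbc:postgresql://prod-db-host:5432/mydatabase
----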
It is important to remember that when configuring H2 (or Derby) to use the embedded engine, -this will work as usual in JVM mode but such an application will not compile into a native image, as the Quarkus extensions only cover for making the JDBC client code compatible with the native compilation step: embedding the whole database engine into a native image is currently not implemented. +this will work as usual in JVM mode but such an application will not compile into a native executable, as the Quarkus extensions only cover for making the JDBC client code compatible with the native compilation step: embedding the whole database engine into a native executable is currently not implemented. If you plan to run such integration tests in the JVM exclusively, it will of course work as usual. -If you want the ability to run such integration test in both JVM and/or native images, we have some cool helpers for you: just add either `@QuarkusTestResource(H2DatabaseTestResource.class)` or `@QuarkusTestResource(DerbyDatabaseTestResource.class)` on any class in your integration tests, this will make sure the test suite starts (and stops) the embedded database into a separate process as necessary to run your tests. +If you want the ability to run such integration test in both JVM and/or native executables, we have some cool helpers for you: just add either `@QuarkusTestResource(H2DatabaseTestResource.class)` or `@QuarkusTestResource(DerbyDatabaseTestResource.class)` on any class in your integration tests, this will make sure the test suite starts (and stops) the embedded database into a separate process as necessary to run your tests. These additional helpers are provided by the artifacts having Maven coordinates `io.quarkus:quarkus-test-h2` and `io.quarkus:quarkus-test-derby`, respectively for H2 and Derby. @@ -473,15 +531,15 @@ public class TestResources { } ---- -This will allow you to test your application even when it's compiled into a native image, +This will allow you to test your application even when it's compiled into a native executable, while the database will run in the JVM as usual. Connect to it using: -[source] +[source,properties] ---- -quarkus.datasource.url=jdbc:h2:tcp://localhost/mem:test -quarkus.datasource.driver=org.h2.Driver +quarkus.datasource.db-kind=h2 +quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:test ---- [[configuration-reference]] @@ -577,6 +635,8 @@ It can run as a server, based on a file, or live completely in memory. All of these options are available as listed above. You can find more information at the https://db.apache.org/derby/docs/10.8/devguide/cdevdvlp17453.html#cdevdvlp17453[official documentation]. 
+:no-duration-note: true
+
 [[reactive-configuration]]
 == Reactive Datasource Configuration Reference
diff --git a/docs/src/main/asciidoc/deploying-to-azure-cloud.adoc b/docs/src/main/asciidoc/deploying-to-azure-cloud.adoc
index e05a006e995d6..e65bd5321dfe1 100644
--- a/docs/src/main/asciidoc/deploying-to-azure-cloud.adoc
+++ b/docs/src/main/asciidoc/deploying-to-azure-cloud.adoc
@@ -1,7 +1,7 @@
 ////
 This guide is maintained in the main Quarkus repository
 and pull requests should be submitted there:
-https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc
+https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
 ////
 = Quarkus - Deploying to Microsoft Azure Cloud
diff --git a/docs/src/main/asciidoc/deploying-to-google-cloud.adoc b/docs/src/main/asciidoc/deploying-to-google-cloud.adoc
new file mode 100644
index 0000000000000..b969cf6932321
--- /dev/null
+++ b/docs/src/main/asciidoc/deploying-to-google-cloud.adoc
@@ -0,0 +1,225 @@
+////
+This guide is maintained in the main Quarkus repository
+and pull requests should be submitted there:
+https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
+////
+= Quarkus - Deploying to Google Cloud Platform (GCP)
+
+include::./attributes.adoc[]
+
+This guide covers:
+
+* Login to Google Cloud
+* Deploying a function to Google Cloud Functions
+* Deploying a JAR to Google App Engine Standard
+* Deploying a Docker image to Google App Engine Flexible Custom Runtimes
+* Deploying a Docker image to Google Cloud Run
+
+== Prerequisites
+
+For this guide you need:
+
+* Roughly 1 hour for all modalities
+* JDK 11
+* Apache Maven {maven-version}
+* https://cloud.google.com/[A Google Cloud Account]. Free accounts work.
+* https://cloud.google.com/sdk[Cloud SDK CLI Installed]
+
+This guide will take as input an application developed in the link:getting-started[Getting Started guide].
+
+Make sure you have the getting-started application at hand, or clone the Git repository: `git clone {quickstarts-clone-url}`,
+or download an {quickstarts-archive-url}[archive]. The solution is located in the `getting-started` directory.
+
+== Login to Google Cloud
+
+Logging in to Google Cloud is necessary for deploying the application; it can be done as follows:
+
+[source, subs=attributes+]
+----
+gcloud auth login
+----
+
+== Deploying to Google Cloud Functions
+
+Quarkus supports deploying your application to Google Cloud Functions via the following extensions:
+
+- link:gcp-functions[Google Cloud Functions]: Build functions using the Google Cloud Functions API.
+- link:gcp-functions-http[Google Cloud Functions HTTP binding]: Build functions using Quarkus HTTP APIs: RESTEasy (JAX-RS),
+Undertow (Servlet), Vert.x Web, or link:funqy-http[Funqy HTTP].
+- link:funqy-gcp-functions[Funqy Google Cloud Functions]: Build functions using Funqy.
+
+Each extension supports a specific kind of application development;
+follow the specific guides for more information on how to develop, package and deploy your applications using them.
+
+== Deploying to Google App Engine Standard
+
+We will only cover the Java 11 runtime, as the Java 8 runtime uses its own Servlet engine which is not compatible with Quarkus.
+
+First of all, make sure to have an App Engine environment initialized for your Google Cloud project; if not, initialize one via `gcloud app create --project=[YOUR_PROJECT_ID]`.
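As a sketch of that initial check-and-create step from the command line (assuming the standard Cloud SDK; `[YOUR_PROJECT_ID]` is a placeholder):

[source,bash]
----
# Shows the App Engine application if one already exists for the project
gcloud app describe --project=[YOUR_PROJECT_ID]

# Otherwise, create it (the region is selected interactively)
gcloud app create --project=[YOUR_PROJECT_ID]
----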
+ +Then, you will need to create a `src/main/appengine/app.yaml` file, let's keep it minimalistic with only the selected engine: + +[source, yaml] +---- +runtime: java11 +---- + +This will create a default service for your App Engine application. + +Then, you can choose to build the application by yourself or letting `gcloud` or the Google Cloud Maven plugin build it for you. + +=== Building the application manually + +Set up your application to be packaged as an uber-jar via your `application.properties` file: + +[source, properties] +---- +quarkus.package.type=uber-jar +---- + +Use Maven to build the application using `mvn clean package`, it will generate a single JAR that contains all the classes of your application including its dependencies. + +Finally, use `gcloud` to deploy your application as an App Engine service. + +[source, shell script] +---- +gcloud app deploy target/getting-started-1.0.0-SNAPSHOT-runner.jar +---- + +This command will upload your application jar and launch it on App Engine. + +When done, the output will display the URL of your application (target url), you can use it with curl or directly open it in your browser using `gcloud app browse`. + +=== Building the application via gcloud + +You can choose to let `gcloud` build your application for you, this is the simplest way to deploy to App Engine. + +Just launch `gcloud app deploy` in the root of your project, it will upload all your project files (the list can be reduced via the `.gcloudignore` file), +package your JAR via Maven (or Gradle) and launch it on App Engine. + +When done, the output will display the URL of your application (target url), you can use it with curl or directly open it in your browser using `gcloud app browse`. + +=== Building the application via the Google Cloud Maven plugin + +You can also let Maven control the deployment of your application using the App Engine Maven plugin. + +First, add the plugin to your `pom.xml`: + +[source,xml] +---- + + com.google.cloud.tools + appengine-maven-plugin + 2.4.0 + + GCLOUD_CONFIG <1> + gettingstarted + ${project.build.directory}/getting-started-${project.version}-runner.jar <2> + + +---- + +1. Use the default `gcloud` configuration +2. Override the default JAR name to the one generated by the Quarkus Maven plugin + +Then you would be able to use Maven to build and deploy your application to App Engine via `mvn clean package appengine:deploy`. + +When it’s done, the output will display the URL of your application (target URL), you can use it with curl or directly open it in your browser using `gcloud app browse`. + +== Deploying to Google App Engine Flexible Custom Runtimes + +Before all, make sure to have an App Engine environment initialized for your Google Cloud project, if not, initialize one via `gcloud app create --project=[YOUR_PROJECT_ID]`. + +App Engine Flexible Custom Runtimes uses a Docker image to run your application. + +First, create an `app.yaml` file at the root of your project with the following content: + +[source, yaml] +---- +runtime: custom +env: flex +---- + +App Engine Flexible Custom Runtimes deploys your application as a Docker container, you can choose to deploy one of the Dockerfile provided inside your application. + +Both JVM and native executable versions will work. + +To deploy a JVM application: + +- Copy the JVM Dockerfile to the root directory of your project: `cp src/main/docker/Dockerfile.jvm Dockerfile`. +- Build your application using `mvn clean package`. 
+
+To deploy a native application:
+
+- Copy the native Dockerfile to the root directory of your project: `cp src/main/docker/Dockerfile.native Dockerfile`.
+- Build your application as a native executable using `mvn clean package -Dnative`.
+
+Finally, launch `gcloud app deploy` in the root of your project; it will upload all your project files (the list can be reduced via the `.gcloudignore` file),
+build your Dockerfile and launch it on the App Engine Flexible custom runtime.
+
+It uses Cloud Build to build your Docker image and deploy it to Google Container Registry (GCR).
+
+When done, the output will display the URL of your application (target url), you can use it with curl or directly open it in your browser using `gcloud app browse`.
+
+NOTE: App Engine Flexible custom runtimes support link:https://cloud.google.com/appengine/docs/flexible/custom-runtimes/configuring-your-app-with-app-yaml#updated_health_checks[health checks];
+it is strongly advised to provide them thanks to Quarkus link:microprofile-health[Microprofile Health] support.
+
+== Deploying to Google Cloud Run
+
+Google Cloud Run allows you to run your Docker containers inside Google Cloud Platform in a managed way.
+
+NOTE: By default, Quarkus listens on port 8080, and it's also the Cloud Run default port.
+No need to use the `PORT` environment variable defined in Cloud Run to customize the Quarkus HTTP port.
+
+Cloud Run will use Cloud Build to build your Docker image and deploy it to Google Container Registry (GCR).
+
+Both JVM and native executable versions will work.
+
+To deploy a JVM application:
+
+- Copy the JVM Dockerfile to the root directory of your project: `cp src/main/docker/Dockerfile.jvm Dockerfile`.
+- Build your application using `mvn clean package`.
+
+To deploy a native application:
+
+- Copy the native Dockerfile to the root directory of your project: `cp src/main/docker/Dockerfile.native Dockerfile`.
+- Build your application as a native executable using `mvn clean package -Dnative`.
+
+Then, create a `.gcloudignore` file to tell gcloud which files should not be uploaded for Cloud Build;
+without it, it defaults to `.gitignore`, which usually excludes the target directory where your packaged application has been created.
+
+In this example, I only exclude the `src` directory:
+
+[source]
+----
+src/
+----
+
+Then, use Cloud Build to build your image; it will upload to a Google Cloud Storage bucket all the files of your application (except the ones ignored by the `.gcloudignore` file),
+build your Docker image and push it to Google Container Registry (GCR).
+
+[source, shell script]
+----
+gcloud builds submit --tag gcr.io/PROJECT-ID/helloworld
+----
+
+NOTE: You can also build your image locally and push it to a publicly accessible Docker registry, then use this image in the next step.
+
+Finally, use Cloud Run to launch your application.
+
+[source, shell script]
+----
+gcloud run deploy --image gcr.io/PROJECT-ID/helloworld --platform managed
+----
+
+Cloud Run will ask you questions on the service name, the region and whether or not unauthenticated calls are allowed.
+After you answer these questions, it will deploy your application.
+
+When the deployment is done, the output will display the URL to access your application.
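As a quick smoke test, the deployed service can then be called from the command line. A sketch, assuming the getting-started application with its `/hello` endpoint; the host below is a placeholder for the URL reported by `gcloud run deploy`:

[source,bash]
----
# Replace the host with the URL printed by the previous command
curl https://getting-started-xxxxxxxxxx-uc.a.run.app/hello
----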
+ +== Going further + +You can find a set of extensions to access various Google Cloud Services in the Quarkiverse (a GitHub organization for Quarkus extensions maintained by the community), +including PubSub, BigQuery, Storage, Spanner, Firestore (visit the repository for an accurate list of supported services). + +You can find some documentation about them in the link:https://github.com/quarkiverse/quarkiverse-google-cloud-services[Quarkiverse Google Cloud Services repository]. diff --git a/docs/src/main/asciidoc/deploying-to-kubernetes.adoc b/docs/src/main/asciidoc/deploying-to-kubernetes.adoc index e261bb8593d82..e009c0d8709ca 100644 --- a/docs/src/main/asciidoc/deploying-to-kubernetes.adoc +++ b/docs/src/main/asciidoc/deploying-to-kubernetes.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Kubernetes extension @@ -27,14 +27,14 @@ To complete this guide, you need: Let's create a new project that contains both the Kubernetes and Jib extensions: -[source,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=kubernetes-quickstart \ -DclassName="org.acme.rest.GreetingResource" \ -Dpath="/greeting" \ - -Dextensions="kubernetes, jib" + -Dextensions="resteasy,kubernetes,jib" cd kubernetes-quickstart ---- @@ -73,7 +73,7 @@ The full source of the `kubernetes.json` file looks something like this: }, "labels" : { "app.kubernetes.io/name" : "test-quarkus-app", - "app.kubernetes.io/version" : "1.0-SNAPSHOT", + "app.kubernetes.io/version" : "1.0.0-SNAPSHOT", }, "name" : "test-quarkus-app" }, @@ -82,14 +82,14 @@ The full source of the `kubernetes.json` file looks something like this: "selector" : { "matchLabels" : { "app.kubernetes.io/name" : "test-quarkus-app", - "app.kubernetes.io/version" : "1.0-SNAPSHOT", + "app.kubernetes.io/version" : "1.0.0-SNAPSHOT", } }, "template" : { "metadata" : { "labels" : { "app.kubernetes.io/name" : "test-quarkus-app", - "app.kubernetes.io/version" : "1.0-SNAPSHOT" + "app.kubernetes.io/version" : "1.0.0-SNAPSHOT" } }, "spec" : { @@ -102,7 +102,7 @@ The full source of the `kubernetes.json` file looks something like this: } } } ], - "image" : "yourDockerUsername/test-quarkus-app:1.0-SNAPSHOT", + "image" : "yourDockerUsername/test-quarkus-app:1.0.0-SNAPSHOT", "imagePullPolicy" : "Always", "name" : "test-quarkus-app" } ] @@ -120,7 +120,7 @@ The full source of the `kubernetes.json` file looks something like this: }, "labels" : { "app.kubernetes.io/name" : "test-quarkus-app", - "app.kubernetes.io/version" : "1.0-SNAPSHOT", + "app.kubernetes.io/version" : "1.0.0-SNAPSHOT", }, "name" : "test-quarkus-app" }, @@ -132,7 +132,7 @@ The full source of the `kubernetes.json` file looks something like this: } ], "selector" : { "app.kubernetes.io/name" : "test-quarkus-app", - "app.kubernetes.io/version" : "1.0-SNAPSHOT" + "app.kubernetes.io/version" : "1.0.0-SNAPSHOT" }, "type" : "ClusterIP" } @@ -142,17 +142,17 @@ The full source of the `kubernetes.json` file looks something like this: The generated manifest can be applied to the cluster from the project root using `kubectl`: -[source] +[source,bash] ---- kubectl apply -f target/kubernetes/kubernetes.json ---- -An important thing to note about the `Deployment` is that is uses 
`yourDockerUsername/test-quarkus-app:1.0-SNAPSHOT` as the container image of the `Pod`.
+An important thing to note about the `Deployment` is that it uses `yourDockerUsername/test-quarkus-app:1.0.0-SNAPSHOT` as the container image of the `Pod`.
 
 The name of the image is controlled by the Jib extension and can be customized using the usual `application.properties`.
 
 For example with a configuration like:
 
-[source]
+[source,properties]
 ----
 quarkus.container-image.group=quarkus #optional, default to the system user name
 quarkus.container-image.name=demo-app #optional, defaults to the application name
@@ -161,11 +161,27 @@ quarkus.container-image.tag=1.0 #optional, defaults to the application ver
 
 The image that will be used in the generated manifests will be `quarkus/demo-app:1.0`
 
+=== Namespace
+
+By default Quarkus omits the namespace in the generated manifests, rather than enforcing the `default` namespace. That means that you can apply the manifest to your chosen namespace when using `kubectl`, which in the example below is `test`:
+
+[source,bash]
+----
+kubectl apply -f target/kubernetes/kubernetes.json -n=test
+----
+
+To specify the namespace in your manifest, set the following property in your `application.properties`:
+
+[source,properties]
+----
+quarkus.kubernetes.namespace=mynamespace
+----
+
 === Defining a Docker registry
 
 The Docker registry can be specified with the following property:
 
-[source]
+[source,properties]
 ----
 quarkus.container-image.registry=my.docker-registry.net
 ----
@@ -181,13 +197,26 @@ The generated manifests use the Kubernetes link:https://kubernetes.io/docs/conce
 
 These labels can be customized using `quarkus.kubernetes.name`, `quarkus.kubernetes.version` and `quarkus.kubernetes.part-of`. For example by adding the following configuration to your `application.properties`:
 
-[source]
+[source,properties]
 ----
 quarkus.kubernetes.part-of=todo-app
 quarkus.kubernetes.name=todo-rest
 quarkus.kubernetes.version=1.0-rc.1
 ----
+[NOTE]
+====
+As is described in detail in the <<#openshift, OpenShift>> section, customizing OpenShift (or Knative) properties is done in the same way, but replacing
+`kubernetes` with `openshift` (or `knative`).
The previous example for OpenShift would look like this: + +[source,properties] +---- +quarkus.openshift.part-of=todo-app +quarkus.openshift.name=todo-rest +quarkus.openshift.version=1.0-rc.1 +---- +==== + The labels in generated resources will look like: [source, json] @@ -203,7 +232,7 @@ The labels in generated resources will look like: To add additional custom labels, for example `foo=bar` just apply the following configuration: -[source] +[source,properties] ---- quarkus.kubernetes.labels.foo=bar ---- @@ -226,10 +255,10 @@ Out of the box, the generated resources will be annotated with version control r Custom annotations can be added in a way similar to labels.For example to add the annotation `foo=bar` and `app.quarkus/id=42` just apply the following configuration: -[source] +[source,properties] ---- quarkus.kubernetes.annotations.foo=bar -quarkus."app.quarkus/id"=42 +quarkus.kubernetes.annotations."app.quarkus/id"=42 ---- [#env-vars] @@ -347,11 +376,22 @@ This would generate the following in the `env` section of your container: It's also possible to use the value from another field to add a new environment variable by specifying the path of the field to be used as a source, as follows: -[source] +[source,properties] ---- quarkus.kubernetes.env.fields.foo=metadata.name ---- +[NOTE] +==== +As is described in detail in the <<#openshift, OpenShift>> section, customizing OpenShift properties is done in the same way, but replacing +`kubernetes` with `openshift`. The previous example for OpenShift would look like this: + +[source,properties] +---- +quarkus.openshift.env.fields.foo=metadata.name +---- +==== + ===== Validation A conflict between two definitions, e.g. mistakenly assigning both a value and specifying that a variable is derived from a field, will result in an error being thrown at build time so that you get the opportunity to fix the issue before you deploy your application to your cluster where it might be more difficult to diagnose the source of the issue. @@ -386,7 +426,7 @@ only generate an environment variable `MY_ENV_VAR=newValue` and issue a warning. The Kubernetes extension allows the user to configure both volumes and mounts for the application. Any volume can be mounted with a simple configuration: -[source] +[source,properties] ---- quarkus.kubernetes.mounts.my-volume.path=/where/to/mount ---- @@ -396,23 +436,51 @@ The volumes themselves can be configured as shown in the sections below. ===== Secret volumes -[source] +[source,properties] ---- quarkus.kubernetes.secret-volumes.my-volume.secret-name=my-secret ---- ===== ConfigMap volumes -[source] +[source,properties] ---- quarkus.kubernetes.config-map-volumes.my-volume.config-map-name=my-secret ---- +==== Passing application configuration + +Quarkus supports passing configuration from external locations (via Smallrye Config). This usually requires setting an additional environment variable or system propertiy. +When you need to use a secret or a config map for the purpose of application configuration, you need to: + +- define a volume +- mount the volume +- create an environment variable for `SMALLRYE_CONFIG_LOCATIONS` + +To simplify things, quarkus provides single step alternative: + +[source,properties] +---- +quarkus.kubernetes.app-secret= +---- + +or + +[source,properties] +---- +quarkus.kubernetes.app-config-map= +---- + +When these properties are used, the generated manifests will contain everything required. 
+The application config volumes will be created using path: `/mnt/app-secret` and `/mnt/app-config-map` for secrets and configmaps respectively. + +Note: Users may use both properties at the same time. + === Changing the number of replicas: To change the number of replicas from 1 to 3: -[source] +[source,properties] ---- quarkus.kubernetes.replicas=3 ---- @@ -435,12 +503,61 @@ More information about the health extension can be found in the relevant link:mi === Customizing the readiness probe: To set the initial delay of the probe to 20 seconds and the period to 45: -[source] +[source,properties] ---- quarkus.kubernetes.readiness-probe.initial-delay=20s quarkus.kubernetes.readiness-probe.period=45s ---- +=== Add hostAliases +To add entries to a Pod's `/etc/hosts` file (more information can be found in https://kubernetes.io/docs/concepts/services-networking/add-entries-to-pod-etc-hosts-with-host-aliases/[Kubernetes documentation]), just apply the following configuration: + +[source,properties] +---- +quarkus.kubernetes.hostaliases."10.0.0.0".hostnames=foo.com,bar.org +---- + +This would generate the following `hostAliases` section in the `deployment` definition: + +[source,yaml] +---- +kind: Deployment +spec: + template: + spec: + hostAliases: + - hostnames: + - foo.com + - bar.org + ip: 10.0.0.0 +---- + +=== Container Resources Management + +CPU & Memory limits and requests can be applied to a `Container` (more info in https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/[Kubernetes documentation]) using the following configuration: + +[source] +---- +quarkus.kubernetes.resources.requests.memory=64Mi +quarkus.kubernetes.resources.requests.cpu=250m +quarkus.kubernetes.resources.limits.memory=512Mi +quarkus.kubernetes.resources.limits.cpu=1000m +---- + +This would generate the following entry in the `container` section: + +[source, yaml] +---- +containers: + resources: + limits: + cpu: 1000m + memory: 512Mi + requests: + cpu: 250m + memory: 64Mi +---- + === Using the Kubernetes client Applications that are deployed to Kubernetes and need to access the API server will usually make use of the `kubernetes-client` extension: @@ -476,7 +593,7 @@ This extension assumes a couple things: * Users won't be using an image registry and will instead make their container image accessible to the Kubernetes cluster by building it directly into Minikube's Docker daemon. To use Minikube's Docker daemon you must first execute: + -[source] +[source,bash] ---- eval $(minikube -p minikube docker-env) ---- @@ -484,7 +601,7 @@ eval $(minikube -p minikube docker-env) * Applications deployed to Kubernetes won't be accessed via a Kubernetes `Ingress`, but rather as a `NodePort` `Service`. The advantage of doing this is that the URL of an application can be retrieved trivially by executing: + -[source] +[source,bash] ---- minikube service list ---- @@ -513,16 +630,18 @@ The table below describe all the available configuration options. 
| quarkus.kubernetes.name | String | | ${quarkus.container-image.name} | quarkus.kubernetes.version | String | | ${quarkus.container-image.tag} | quarkus.kubernetes.part-of | String | | -| quarkus.kubernetes.init-containers | Map | | +| quarkus.kubernetes.init-containers | Map | | +| quarkus.kubernetes.namespace | String | | | quarkus.kubernetes.labels | Map | | | quarkus.kubernetes.annotations | Map | | +| quarkus.kubernetes.app-secret | String | | +| quarkus.kubernetes.app-config-map | String | | | quarkus.kubernetes.env-vars | Map | | | quarkus.kubernetes.working-dir | String | | | quarkus.kubernetes.command | String[] | | | quarkus.kubernetes.arguments | String[] | | | quarkus.kubernetes.replicas | int | | 1 | quarkus.kubernetes.service-account | String | | -| quarkus.kubernetes.host | String | | | quarkus.kubernetes.ports | Map | | | quarkus.kubernetes.service-type | ServiceType | | ClusterIP | quarkus.kubernetes.pvc-volumes | Map | | @@ -538,14 +657,21 @@ The table below describe all the available configuration options. | quarkus.kubernetes.liveness-probe | Probe | | ( see Probe ) | quarkus.kubernetes.readiness-probe | Probe | | ( see Probe ) | quarkus.kubernetes.sidecars | Map | | -| quarkus.kubernetes.expose | boolean | | false +| quarkus.kubernetes.ingress.expose | boolean | | false +| quarkus.kubernetes.ingress.host | String | | +| quarkus.kubernetes.ingress.annotations | Map | | | quarkus.kubernetes.headless | boolean | | false +| quarkus.kubernetes.hostaliases | Map | | +| quarkus.kubernetes.resources.requests.cpu | String | | +| quarkus.kubernetes.resources.requests.memory | String | | +| quarkus.kubernetes.resources.limits.cpu | String | | +| quarkus.kubernetes.resources.limits.memory | String | | |==== Properties that use non-standard types, can be referenced by expanding the property. 
For example to define a `kubernetes-readiness-probe` which is of type `Probe`: -[source] +[source,properties] ---- quarkus.kubernetes.readiness-probe.initial-delay=20s quarkus.kubernetes.readiness-probe.period=45s @@ -564,9 +690,9 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` |==== | Property | Type | Description | Default Value | value | String | | -| secret | String | | -| configmap | String | | -| field | String | | +| secret | String | | +| configmap | String | | +| field | String | | |==== .Probe @@ -574,7 +700,7 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` | Property | Type | Description | Default Value | http-action-path | String | | | exec-action | String | | -| tcp-socket-action | String | | +| tcp-socket-action | String | | | initial-delay | Duration | | 0 | period | Duration | | 30s | timeout | Duration | | 10s @@ -594,16 +720,21 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` | Property | Type | Description | Default Value | image | String | | | env-vars | Env[] | | -| working-dir | String | | +| working-dir | String | | | command | String[] | | | arguments | String[] | | | ports | Port[] | | | mounts | Mount[] | | -| image-pull-policy | ImagePullPolicy | | Always -| liveness-probe | Probe | | -| readiness-probe | Probe | | +| image-pull-policy | ImagePullPolicy | | Always +| liveness-probe | Probe | | +| readiness-probe | Probe | | |==== +.HostAlias +|==== +| Property | Type | Description | Default Value +| hostnames | String[] | list of hostnames | +|==== ==== Mounts and Volumes @@ -611,8 +742,8 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` |==== | Property | Type | Description | Default Value | path | String | | -| sub-path | String | | -| read-only | boolean | | false +| sub-path | String | | +| read-only | boolean | | false |==== .ConfigMapVolume @@ -636,11 +767,11 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` |==== | Property | Type | Description | Default Value | disk-name | String | | -| disk-uri | String | | +| disk-uri | String | | | kind | String | | Managed -| caching-mode | String | | ReadWrite -| fs-type | String | | ext4 -| read-only | boolean | | false +| caching-mode | String | | ReadWrite +| fs-type | String | | ext4 +| read-only | boolean | | false |==== .AwsElasticBlockStoreVolume @@ -648,8 +779,8 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` | Property | Type | Description | Default Value | volume-id | String | | | partition | int | | -| fs-type | String | | ext4 -| read-only | boolean | | false +| fs-type | String | | ext4 +| read-only | boolean | | false |==== .GitRepoVolume @@ -664,47 +795,67 @@ Allowed values: `cluster-ip`, `node-port`, `load-balancer`, `external-name` |==== | Property | Type | Description | Default Value | claim-name | String | | -| read-only | boolean | | false +| read-only | boolean | | false |==== .AzureFileVolume |==== | Property | Type | Description | Default Value | share-name | String | | -| secret-name | String | | -| read-only | boolean | | false +| secret-name | String | | +| read-only | boolean | | false |==== [#openshift] === OpenShift +One way to deploy an application to OpenShift is to use s2i (source to image) to create an image stream from the source and then deploy the image stream: + +[source,bash] +---- +./mvnw quarkus:remove-extension -Dextensions="kubernetes, jib" +./mvnw quarkus:add-extension -Dextensions="openshift" + 
+oc new-project quarkus-project +./mvnw clean package -Dquarkus.container-image.build=true + +oc new-app --name=greeting quarkus-project/kubernetes-quickstart:1.0.0-SNAPSHOT +oc expose svc/greeting +oc get route +curl /greeting +---- + +See further information in link:deploying-to-openshift[Deploying to OpenShift]. + +A description of OpenShift resources and customisable properties is given below alongside Kubernetes resources to show similarities where applicable. This includes an alternative to `oc new-app ...` above, i.e. `oc apply -f target/kubernetes/openshift.json` . + To enable the generation of OpenShift resources, you need to include OpenShift in the target platforms: -[source] +[source,properties] ---- quarkus.kubernetes.deployment-target=openshift ---- If you need to generate resources for both platforms (vanilla Kubernetes and OpenShift), then you need to include both (comma separated). -[source] +[source,properties] ---- quarkus.kubernetes.deployment-target=kubernetes,openshift ---- -Following the execution of `./mvnw package` you will notice amongst the other files that are created, two files named +Following the execution of `./mvnw package -Dquarkus.container-image.build=true ` you will notice amongst the other files that are created, two files named `openshift.json` and `openshift.yml` in the `target/kubernetes/` directory. These manifests can be deployed as is to a running cluster, using `kubectl`: -[source] +[source,bash] ---- kubectl apply -f target/kubernetes/openshift.json ---- -Openshift users might want to use `oc` instead of `kubectl`: +OpenShift users might want to use `oc` instead of `kubectl`: -[source] +[source,bash] ---- oc apply -f target/kubernetes/openshift.json ---- @@ -724,13 +875,14 @@ The OpenShift resources can be customized in a similar approach with Kubernetes. | quarkus.openshift.init-containers | Map | | | quarkus.openshift.labels | Map | | | quarkus.openshift.annotations | Map | | +| quarkus.openshift.app-secret | String | | +| quarkus.openshift.app-config-map | String | | | quarkus.openshift.env-vars | Map | | | quarkus.openshift.working-dir | String | | | quarkus.openshift.command | String[] | | | quarkus.openshift.arguments | String[] | | | quarkus.openshift.replicas | int | | 1 | quarkus.openshift.service-account | String | | -| quarkus.openshift.host | String | | | quarkus.openshift.ports | Map | | | quarkus.openshift.service-type | ServiceType | | ClusterIP | quarkus.openshift.pvc-volumes | Map | | @@ -746,16 +898,18 @@ The OpenShift resources can be customized in a similar approach with Kubernetes. | quarkus.openshift.liveness-probe | Probe | | ( see Probe ) | quarkus.openshift.readiness-probe | Probe | | ( see Probe ) | quarkus.openshift.sidecars | Map | | -| quarkus.openshift.expose | boolean | | false +| quarkus.openshift.route.expose | boolean | | false +| quarkus.openshift.route.host | String | | +| quarkus.openshift.route.annotations | Map | | | quarkus.openshift.headless | boolean | | false |==== [#knative] === Knative - + To enable the generation of Knative resources, you need to include Knative in the target platforms: -[source] +[source,properties] ---- quarkus.kubernetes.deployment-target=knative ---- @@ -780,7 +934,7 @@ The full source of the `knative.json` file looks something like this: }, "labels" : { "app.kubernetes.io/name" : "test-quarkus-app", - "app.kubernetes.io/version" : "1.0-SNAPSHOT" + "app.kubernetes.io/version" : "1.0.0-SNAPSHOT" }, "name" : "knative. 
}, @@ -790,7 +944,7 @@ The full source of the `knative.json` file looks something like this: "revisionTemplate" : { "spec" : { "container" : { - "image" : "dev.local/yourDockerUsername/test-quarkus-app:1.0-SNAPSHOT", + "image" : "dev.local/yourDockerUsername/test-quarkus-app:1.0.0-SNAPSHOT", "imagePullPolicy" : "Always" } } @@ -804,7 +958,7 @@ The full source of the `knative.json` file looks something like this: The generated manifest can be deployed as is to a running cluster, using `kubectl`: -[source] +[source,bash] ---- kubectl apply -f target/kubernetes/knative.json ---- @@ -820,6 +974,8 @@ The generated service can be customized using the following properties: | quarkus.knative.init-containers | Map | | | quarkus.knative.labels | Map | | | quarkus.knative.annotations | Map | | +| quarkus.knative.app-secret | String | | +| quarkus.knative.app-config-map | String | | | quarkus.knative.env-vars | Map | | | quarkus.knative.working-dir | String | | | quarkus.knative.command | String[] | | @@ -842,8 +998,20 @@ The generated service can be customized using the following properties: | quarkus.knative.liveness-probe | Probe | | ( see Probe ) | quarkus.knative.readiness-probe | Probe | | ( see Probe ) | quarkus.knative.sidecars | Map | | +| quarkus.knative.revision-name | String | | +| quarkus.knative.traffic | Traffic[] | | ( see Traffic ) |==== +.Traffic +|==== +| Property | Type | Description | Default Value +| revision-name | String | A specific revision to which to send this portion of traffic | +| tag | String | Expose a dedicated URL for referencing this target | +| latest-revision | Boolean | Optionally provided to indicate that the latest revision should be used for this traffic target | false +| percent | Long | Indicates the percent of traffic that is routed to this revision | 100 +|==== + + === Deployment targets Mentioned in the previous sections was the concept of `deployment-target`. This concept allows users to control which Kubernetes manifests will be generated @@ -876,7 +1044,7 @@ Properties referring to config group arrays (e.g. `kubernetes.labels[0]`, `kuber The code below demonstrates the change in `labels` config: -[source] +[source,properties] ---- # Old labels config: kubernetes.labels[0].name=foo @@ -888,7 +1056,7 @@ quarkus.kubernetes.labels.foo=bar The code below demonstrates the change in `env-vars` config: -[source] +[source,properties] ---- # Old env-vars config: kubernetes.env-vars[0].name=foo @@ -908,7 +1076,7 @@ See <<#env-vars>> and more specifically <> for more details. To trigger building and deploying a container image you need to enable the `quarkus.kubernetes.deploy` flag (the flag is disabled by default - furthermore it has no effect during test runs or dev mode).
This can be easily done with the command line: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -Dquarkus.kubernetes.deploy=true ---- @@ -988,7 +1156,7 @@ spec: selector: matchLabels: app.kubernetes.io/name: "kubernetes-quickstart" - app.kubernetes.io/version: "1.0-SNAPSHOT" + app.kubernetes.io/version: "1.0.0-SNAPSHOT" template: metadata: annotations: @@ -1000,7 +1168,7 @@ spec: - env: - name: "FOO" <3> value: "BAR" - image: "<>/kubernetes-quickstart:1.0-SNAPSHOT" <4> + image: "<>/kubernetes-quickstart:1.0.0-SNAPSHOT" <4> imagePullPolicy: "Always" name: "kubernetes-quickstart" ports: @@ -1017,3 +1185,67 @@ The provided replicas <1>, labels <2> and environment variables <3> were retain If the resource name does not match the application name (or the overridden name) instead of reusing the resource a new one will be added. Same goes for the container. If the name of the container does not match the application name (or the overridden name), container specific configuration will be ignored. ==== + +== Service Binding + +Quarkus supports binding services to applications via the link:https://github.com/k8s-service-bindings/spec[Service Binding Specification for Kubernetes]. +Specifically Quarkus implements the link:https://github.com/k8s-service-bindings/spec#application-projection[Application Projection] part of the specification, thus allowing +applications running in appropriately configured Kubernetes clusters to consume services (such as a Database or a Broker) without the need for user configuration. + +Currently, the following Quarkus extensions support this feature: + +* quarkus-jdbc-mariadb +* quarkus-jdbc-mssql +* quarkus-jdbc-mysql +* quarkus-jdbc-postgresql +* quarkus-kafka-client +* quarkus-smallrye-reactive-messaging-kafka + +This list of extensions will grow as more services with supported bindings become available on Kubernetes. + +To enable Service Binding support, in addition to one of the currently supported extensions, the `quarkus-kubernetes-service-binding` extension needs to be added to the application dependencies. + +=== Example using PostgreSQL + +If the Kubernetes cluster in which your Quarkus application runs supports provisioning a PostgreSQL database, one could use the following dependencies to take advantage of the Service Binding feature: + +[source,xml] +---- + + io.quarkus + quarkus-jdbc-postgresql + + + io.quarkus + quarkus-kubernetes-service-binding + + + + + io.quarkus + quarkus-hibernate-orm + +---- + +The configuration of the application could be as simple as: + +[source,properties] +---- +quarkus.hibernate-orm.database.generation=validate +---- + +You'll notice that all the configuration for the datasource is absent, as it will be auto-discovered by Quarkus when the application is deployed to Kubernetes. + +=== How does it work? + +When the application is deployed to an appropriately configured Kubernetes cluster, Kubernetes will create on the filesystem a set of files containing the runtime configuration (for example a file containing the URL of the database, another for the username). +Quarkus knows where in the file system to look for these files (as it consults the `SERVICE_BINDING_ROOT` environment variable which is set by Kubernetes) and if they exist, Quarkus knows how to map the values of these files to the appropriate extension configuration properties. 
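As a purely illustrative sketch (the binding directory name and the individual file names depend on the bound service, so treat them as assumptions), the tree that Quarkus walks under `SERVICE_BINDING_ROOT` could look roughly like this:

[source,text]
----
$SERVICE_BINDING_ROOT/
└── my-binding/        # one directory per binding (name is service-specific)
    ├── type           # e.g. "postgresql"
    ├── host
    ├── port
    ├── username
    └── password
----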
+ +An example of what the file system looks like can be seen link:https://github.com/quarkusio/quarkus/tree/e7efe6b3efba91b9c4ae26f9318f8397e23e7505/integration-tests/kubernetes-service-binding-jdbc/src/test/resources/k8s-sb[here]. +In that example, the `k8s-sb` directory is meant to be the root of all service bindings. Only one binding which exists in that case and it is named `fruit-db`. This binding has a `type` file indicating that it is a `postgresql` database +while the other files provide the necessary connection information. + +[TIP] +==== +For more details on how the discovery of the available properties works, please read the link:https://github.com/k8s-service-bindings/spec#application-projection[Application Projection] part of the Service Binding specification. +==== diff --git a/docs/src/main/asciidoc/deploying-to-openshift-s2i.adoc b/docs/src/main/asciidoc/deploying-to-openshift-s2i.adoc deleted file mode 100644 index cf1d2d31e1b3a..0000000000000 --- a/docs/src/main/asciidoc/deploying-to-openshift-s2i.adoc +++ /dev/null @@ -1,156 +0,0 @@ -//// -This guide is maintained in the main Quarkus repository -and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc -//// -= Quarkus - Deploying on OpenShift with S2I - -include::./attributes.adoc[] - -This guide covers: - -* The deployment of the application to OpenShift using S2I to build - -== Prerequisites - -For this guide you need: - -* roughly 5 minutes -* having access to an OpenShift cluster. Minishift is a valid option. - -== Solution - -We recommend to follow the instructions in the next sections and build the application step by step. -However, you can go right to the completed example. - -Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. - -The solution is located in the `getting-started` directory. - -== Deploying the application as GraalVM native executable in OpenShift - -In this section, we are going to leverage the S2I build mechanism of OpenShift. -We use Quarkus' GraalVM Native S2I Builder, and therefore do not need a `Dockerfile` in this approach. -You do not need to locally clone the Git repository, as it will be directly built inside OpenShift. -We are going to create an OpenShift `build` executing it: - -[source,shell, subs="attributes"] ----- -# To build the image on OpenShift -oc new-app quay.io/quarkus/ubi-quarkus-native-s2i:{graalvm-flavor}~{quickstarts-clone-url} --context-dir=getting-started --name=quarkus-quickstart-native -oc logs -f bc/quarkus-quickstart-native - -# To create the route -oc expose svc/quarkus-quickstart-native - -# Get the route URL -export URL="http://$(oc get route | grep quarkus-quickstart-native | awk '{print $2}')" -echo $URL -curl $URL/hello/greeting/quarkus ----- - -Your application is accessible at the printed URL. - -Note that GraalVM-based native build are more memory & CPU intensive than regular pure Java builds. -https://docs.openshift.com/container-platform/3.11/dev_guide/builds/advanced_build_operations.html[By default, builds are completed by pods using unbound resources, such as memory and CPU], -but note that https://docs.openshift.com/container-platform/3.11/admin_guide/limits.html#admin-guide-limits[your OpenShift Project may have limit ranges defined]. - -Testing indicates that the "hello, world" getting-started demo application builds in around 2 minutes on typical hardware when the build is given 4 GB of RAM and 4 (virtual) CPUs for concurrency. 
You therefore may need to increase the respective limits for OpenShift's S2I build containers like so: - -[source,yaml] ----- -apiVersion: "v1" -kind: "BuildConfig" -metadata: - name: "quarkus-quickstart-native" -spec: - resources: - limits: - cpu: '4' - memory: 4Gi ----- - -The following `oc patch` command adds these `limits`, and `oc start-build` launches a new build: - -[source,shell] ----- -oc patch bc/quarkus-quickstart-native -p '{"spec":{"resources":{"limits":{"cpu":"4", "memory":"4Gi"}}}}' - -oc start-build quarkus-quickstart-native ----- - -== Building a minimal runtime container - -As an alternative to creating an application from the S2I build process one can use chained builds to produce a runner image that is minimal in size and does not contain all the dependencies required to build the application. - -The following command will create a chained build that is triggered whenever the quarkus-quickstart-native is built. - -[source,shell] ----- -oc new-build --name=minimal-quarkus-quickstart-native \ - --docker-image=registry.access.redhat.com/ubi7-dev-preview/ubi-minimal \ - --source-image=quarkus-quickstart-native \ - --source-image-path='/home/quarkus/application:.' \ - --dockerfile=$'FROM registry.access.redhat.com/ubi7-dev-preview/ubi-minimal:latest\nCOPY application /application\nCMD /application\nEXPOSE 8080' ----- - -To create a service from the minimal build run the following command: - -[source,shell] ----- -oc new-app minimal-quarkus-quickstart-native - -oc expose svc minimal-quarkus-quickstart-native ----- - -The end result is an image that is less than 40 MB in size (compressed) and does not contain build dependencies like GraalVM, OpenJDK, Maven, etc. - -[TIP] -.Creating build without config. -==== -The minimal build is depending on the S2I build since it is using the output (native runnable application) from the S2I build. However, you do not need to create an application with `oc new-app`. Instead you could use `oc new-build` like this: -[source, shell, subs="attributes"] ----- -oc new-build quay.io/quarkus/ubi-quarkus-native-s2i:{graalvm-flavor}~{quickstarts-clone-url} \ - --context-dir=getting-started --name=quarkus-quickstart-native ----- -==== - -== Deploying the application as Java application in OpenShift - -In this section, we are going to leverage the S2I build mechanism of OpenShift. -We use a Java S2I Builder, and therefore do not need a `Dockerfile` in this approach. -You do not need to locally clone the Git repository, as it will be directly built inside OpenShift. -We are going to create an OpenShift `build` executing it: - -[source,shell, subs="attributes"] ----- -# To build the image on OpenShift -oc new-app registry.access.redhat.com/ubi8/openjdk-11:latest~{quickstarts-clone-url} --context-dir=getting-started --name=quarkus-quickstart -oc logs -f bc/quarkus-quickstart - -# To create the route -oc expose svc/quarkus-quickstart - -# Get the route URL -export URL="http://$(oc get route | grep quarkus-quickstart | awk '{print $2}')" -curl $URL/hello/greeting/quarkus ----- - -Your application is accessible at the printed URL. 
- -The `.s2i/environment` file in the quickstart sets required variables for the S2I Builder image to find the Quarkus `runner` JAR, and copy the JARs from the `lib/` directory: - -[source,text] ----- -MAVEN_S2I_ARTIFACT_DIRS=target -S2I_SOURCE_DEPLOYMENTS_FILTER=*-runner.jar lib -JAVA_OPTIONS=-Dquarkus.http.host=0.0.0.0 -AB_JOLOKIA_OFF=true ----- - -== Going further - -This guide covered the deployment of a Quarkus application on OpenShift using S2I. -However, there is much more, and the integration with these environments has been tailored to make Quarkus applications execution very smooth. -For instance, the health extension can be used for health check; the configuration support allows mounting the application configuration using config map, the metric extension produces data _scrapable_ by Prometheus and so on. diff --git a/docs/src/main/asciidoc/deploying-to-openshift.adoc b/docs/src/main/asciidoc/deploying-to-openshift.adoc index 543721836edf0..6d52a44fdfd06 100644 --- a/docs/src/main/asciidoc/deploying-to-openshift.adoc +++ b/docs/src/main/asciidoc/deploying-to-openshift.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Deploying on OpenShift @@ -19,19 +19,20 @@ To complete this guide, you need: * JDK 1.8+ installed with `JAVA_HOME` configured appropriately * Apache Maven {maven-version} * access to an OpenShift cluster (Minishift is a viable option) +* OpenShift CLI (Optional. Only required for manually deploying) == Creating the Maven project First, we need a new project that contains the OpenShift extension. This can be done using the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=openshift-quickstart \ -DclassName="org.acme.rest.GreetingResource" \ -Dpath="/greeting" \ - -Dextensions="openshift" + -Dextensions="resteasy,openshift" cd openshift-quickstart ---- @@ -56,53 +57,100 @@ By adding this dependency, we now have the ability to configure the OpenShift re The configuration items that are available can be found in: `io.quarkus.kubernetes.deployment.OpenShiftConfig` class. Furthermore, the items provided by `io.quarkus.deployment.ApplicationConfig` affect the OpenShift resources. -=== Building +=== Build and Deploy (in separate steps) +If the OpenShift extension was not included during the bootstraping of the project nor was it added subsequently (check pom.xml file for it), then it can be added like this: + +[source,bash,subs=attributes+] +---- + ./mvnw quarkus:add-extension -Dextensions="openshift" +---- + Building is handled by the link:container-image#s2i[container-image-s2i] extension. To trigger a build: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -Dquarkus.container-image.build=true ---- The build that will be performed is an s2i binary build. The input of the build is the jar that has been built locally and the output of the build is an ImageStream that is configured to automatically trigger a deployment. 
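If you want to follow the progress of that s2i binary build on the cluster, the `BuildConfig` logs can be tailed with `oc`. The build config name used below is an assumption (it normally matches the application name, `openshift-quickstart` in this guide); check the output of `oc get bc` for the actual name:

[source,bash]
----
# list the build configs created for the application
oc get bc
# follow the logs of the in-progress s2i binary build (name assumed to match the artifact id)
oc logs -f bc/openshift-quickstart
----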
+To deploy the container image created in the above step to OpenShift, follow these commands: +[source,bash,subs=attributes+] +---- +oc get is +oc new-app --name=greeting /openshift-quickstart:1.0.0-SNAPSHOT +oc get svc +oc expose svc/greeting +oc get routes +curl http:///greeting +---- + +In the above, `oc get is` will list the image stream created. It should be tagged as /openshift-quickstart:1.0.0-SNAPSHOT. + +Similarly, `oc get route` will list the route URL for the exposed service "greeting" so that you can use it to test the application. + === Non S2i Builds Out of the box the openshift extension is configured to use link:container-image#s2i[container-image-s2i]. However, it's still possible to use other container image extensions like: -- link:container-image#s2i[container-image-docker] -- link:container-image#s2i[container-image-jib] +- link:container-image#docker[container-image-docker] +- link:container-image#jib[container-image-jib] When a non-s2i container image extension is used, an ImageStream is created that is pointing to an external `dockerImageRepository`. The image is built and pushed to the registry and the ImageStream populates the tags that are available in the `dockerImageRepository`. To select which extension will be used for building the image: -[source] ---- +[source,properties] +---- quarkus.container-image.builder=docker ---- +---- or -[source] ---- +[source,properties] +---- quarkus.container-image.builder=jib ---- +---- -=== Deploying +=== Build and Deploy (in a single step) -To trigger a deployment: +If the OpenShift extension was not included during the bootstraping of the project nor was it added subsequently (check pom.xml file for it), then it can be added like this: -[source, subs=attributes+] +[source,bash,subs=attributes+] +---- + ./mvnw quarkus:add-extension -Dextensions="openshift" +---- + +To trigger a build and deployment in a single step: + +[source,bash,subs=attributes+] ---- ./mvnw clean package -Dquarkus.kubernetes.deploy=true ---- -The command above will trigger a container image build and will apply the generated OpenShift resources, right after. +The aforementioned command will build a jar file locally, trigger a container image build and then apply the generated OpenShift resources. The generated resources are using OpenShift's `DeploymentConfig` that is configured to automatically trigger a redeployment when a change in the `ImageStream` is noticed. In other words, any container image build after the initial deployment will automatically trigger redeployment, without the need to delete, update or re-apply the generated resources. +To confirm the above command has created an image stream, a service resource and has deployed the application (has a pod running), apply these commands: + +[source,bash,subs=attributes+] +---- +oc get is +oc get pods +oc get svc +---- + +To expose the created service to a route and test it: + +[source,bash,subs=attributes+] +---- +oc expose svc/greeting +oc get routes +curl http:///greeting +---- + === Customizing All available customization options are available in the link:deploying-to-kubernetes#openshift[OpenShift configuration options]. @@ -113,25 +161,28 @@ Some examples are provided in the sections below: To expose a `Route` for the Quarkus application: -[source] +[source,properties] ---- -quarkus.openshift.expose=true +quarkus.openshift.route.expose=true ---- -Tip: You don't necessarily need to add this property in the `application.properties`. 
You can pass it as a command line argument: +[TIP] +==== +You don't necessarily need to add this property in the `application.properties`. You can pass it as a command line argument: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- -./mvnw clean package -Dquarkus.openshift.expose=true +./mvnw clean package -Dquarkus.openshift.route.expose=true ---- The same applies to all properties listed below. +==== ==== Labels To add a label in the generated resources: -[source] +[source,properties] ---- quarkus.openshift.labels.foo=bar ---- @@ -140,44 +191,157 @@ quarkus.openshift.labels.foo=bar To add an annotation in the generated resources: -[source] +[source,properties] ---- quarkus.openshift.annotations.foo=bar ---- +[#env-vars] ==== Environment variables -To add an annotation in the generated resources: +OpenShift provides multiple ways of defining environment variables: + +- key/value pairs +- import all values from a Secret or ConfigMap +- interpolate a single value identified by a given field in a Secret or ConfigMap +- interpolate a value from a field within the same resource + +===== Environment variables from key/value pairs -[source] +To add a key/value pair as an environment variable in the generated resources: + +[source,properties] ---- -quarkus.openshift.env-vars.my-env-var.value=foobar +quarkus.openshift.env.vars.my-env-var=foobar ---- The command above will add `MY_ENV_VAR=foobar` as an environment variable. Please note that the key `my-env-var` will be converted to uppercase and dashes will be replaced by underscores resulting in `MY_ENV_VAR`. -You may also noticed that in contrast to labels, and annotations for environment variables you don't just use a key=value approach. -That is because for environment variables there are additional options rather than just value. 
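For comparison with the `Secret` and `ConfigMap` variants below, a plain key/value variable like the one above ends up in the container's `env` section roughly as follows (a sketch; the other container fields are omitted):

[source,yaml]
----
- env:
    - name: MY_ENV_VAR
      value: foobar
----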
- ===== Environment variables from Secret -To add all key value pairs of a `Secret` as environment variables: +To add all key/value pairs of `Secret` as environment variables just apply the following configuration, separating each `Secret` +to be used as source by a comma (`,`): -[source] +[source,properties] ---- -quarkus.openshift.env-vars.my-env-var.secret=my-secret +quarkus.openshift.env.secrets=my-secret,my-other-secret +---- + +which would generate the following in the container definition: + +[source,yaml] +---- +envFrom: + - secretRef: + name: my-secret + optional: false + - secretRef: + name: my-other-secret + optional: false +---- + +The following extracts a value identified by the `keyName` field from the `my-secret` Secret into a `foo` environment variable: + +[source,properties] +---- +quarkus.openshift.env.mapping.foo.from-secret=my-secret +quarkus.openshift.env.mapping.foo.with-key=keyName +---- + +This would generate the following in the `env` section of your container: + +[source,yaml] +---- +- env: + - name: FOO + valueFrom: + secretKeyRef: + key: keyName + name: my-secret + optional: false ---- ===== Environment variables from ConfigMap -To add all key value pairs of a `ConfigMap` as environment variables: +To add all key/value pairs from `ConfigMap` as environment variables just apply the following configuration, separating each +`ConfigMap` to be used as source by a comma (`,`): + +[source,properties] +---- +quarkus.openshift.env.configmaps=my-config-map,another-config-map +---- + +which would generate the following in the container definition: -[source] +[source,yaml] ---- -quarkus.openshift.env-vars.my-env-var.configmap=my-secret +envFrom: + - configMapRef: + name: my-config-map + optional: false + - configMapRef: + name: another-config-map + optional: false ---- +The following extracts a value identified by the `keyName` field from the `my-config-map` ConfigMap into a `foo` +environment variable: + +[source,properties] +---- +quarkus.openshift.env.mapping.foo.from-configmap=my-configmap +quarkus.openshift.env.mapping.foo.with-key=keyName +---- + +This would generate the following in the `env` section of your container: + +[source,yaml] +---- +- env: + - name: FOO + valueFrom: + configMapRefKey: + key: keyName + name: my-configmap + optional: false +---- + +===== Environment variables from fields + +It's also possible to use the value from another field to add a new environment variable by specifying the path of the field to be used as a source, as follows: + +[source,properties] +---- +quarkus.openshift.env.fields.foo=metadata.name +---- + +===== Validation + +A conflict between two definitions, e.g. mistakenly assigning both a value and specifying that a variable is derived from a field, will result in an error being thrown at build time so that you get the opportunity to fix the issue before you deploy your application to your cluster where it might be more difficult to diagnose the source of the issue. + +Similarly, two redundant definitions, e.g. defining an injection from the same secret twice, will not cause an issue but will indeed report a warning to let you know that you might not have intended to duplicate that definition. + +[#env-vars-backwards] +===== Backwards compatibility + +Previous versions of the OpenShift extension supported a different syntax to add environment variables. The older syntax is still supported but is deprecated, and it's advised that you migrate to the new syntax. + +.Old vs. 
new syntax +|==== +| |Old | New | +| Plain variable |`quarkus.openshift.env-vars.my-env-var.value=foobar` | `quarkus.openshift.env.vars.my-env-var=foobar` | +| From field |`quarkus.openshift.env-vars.my-env-var.field=foobar` | `quarkus.openshift.env.fields.my-env-var=foobar` | +| All from `ConfigMap` |`quarkus.openshift.env-vars.xxx.configmap=foobar` | `quarkus.openshift.env.configmaps=foobar` | +| All from `Secret` |`quarkus.openshift.env-vars.xxx.secret=foobar` | `quarkus.openshift.env.secrets=foobar` | +| From one `Secret` field |`quarkus.openshift.env-vars.foo.secret=foobar` | `quarkus.openshift.env.mapping.foo.from-secret=foobar` | +| |`quarkus.openshift.env-vars.foo.value=field` | `quarkus.openshift.env.mapping.foo.with-key=field` | +| From one `ConfigMap` field |`quarkus.openshift.env-vars.foo.configmap=foobar` | `quarkus.openshift.env.mapping.foo.from-configmap=foobar` | +| |`quarkus.openshift.env-vars.foo.value=field` | `quarkus.openshift.env.mapping.foo.with-key=field` | +|==== + +NOTE: If you redefine the same variable using the new syntax while keeping the old syntax, **ONLY** the new version will be kept, and a warning will be issued to alert you of the problem. For example, if you define both +`quarkus.openshift.env-vars.my-env-var.value=foobar` and `quarkus.openshift.env.vars.my-env-var=newValue`, the extension will only generate an environment variable `MY_ENV_VAR=newValue` and issue a warning. ==== Mounting volumes @@ -185,7 +349,7 @@ The OpenShift extension allows the user to configure both volumes and mounts for Any volume can be mounted with a simple configuration: -[source] +[source,properties] ---- quarkus.openshift.mounts.my-volume.path=/where/to/mount ---- @@ -196,14 +360,38 @@ The volumes themselves can be configured as shown in the sections below: ===== Secret volumes -[source] +[source,properties] ---- quarkus.openshift.secret-volumes.my-volume.secret-name=my-secret ---- ===== ConfigMap volumes -[source] +[source,properties] ---- quarkus.openshift.config-map-volumes.my-volume.config-map-name=my-secret ---- + +=== Knative - OpenShift Serverless + +OpenShift also provides the ability to use Knative via the link:https://www.openshift.com/learn/topics/serverless[OpenShift Serverless] functionality. + +The first order of business is to instruct Quarkus to generate Knative resources by setting: + +[source,properties] +---- +quarkus.kubernetes.deployment-target=knative +---- + +In order to leverage OpenShift S2I to build the container image on the cluster and use the resulting container image for the Knative application, +we need to set a couple of configuration properties: + +[source,properties] +---- +# set the Kubernetes namespace which will be used to run the application +quarkus.container-image.group=geoand +# set the container image registry - this is the standard URL used to refer to the internal OpenShift registry +quarkus.container-image.registry=image-registry.openshift-image-registry.svc:5000 +---- + +The application can then be deployed to OpenShift Serverless by enabling the standard `quarkus.kubernetes.deploy=true` property. 
diff --git a/docs/src/main/asciidoc/dev-ui.adoc b/docs/src/main/asciidoc/dev-ui.adoc new file mode 100644 index 0000000000000..22d2404019a20 --- /dev/null +++ b/docs/src/main/asciidoc/dev-ui.adoc @@ -0,0 +1,402 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Dev UI + +include::./attributes.adoc[] + +This guide covers the Quarkus Dev UI for link:building-my-first-extension[extension authors]. + +Quarkus now ships with a new experimental Dev UI, which is available in dev mode (when you start +quarkus with `mvn quarkus:dev`) at http://localhost:8080/q/dev[/q/dev] by default. It will show you something like +this: + +image::dev-ui-overview.png[alt=Dev UI overview,role="center"] + +It allows you to quickly visualize all the extensions currently loaded, see their status and go directly +to their documentation. + +On top of that, each extension can add: + +- <> +- <> +- <> + +== How can I make my extension support the Dev UI? + +In order to make your extension listed in the Dev UI you don't need to do anything! + +So you can always start with that :) + +If you want to contribute badges or links in your extension card on the Dev UI overview +page, like this: + +image:dev-ui-embedded.png[alt=Dev UI embedded,role="center"] + +You have to add a file named `dev-templates/embedded.html` in your +link:building-my-first-extension#description-of-a-quarkus-extension[`deployment`] +extension module's resources: + +image::dev-ui-embedded-file.png[alt=Dev UI embedded.html] + +The contents of this file will be included in your extension card, so for example we can place +two links with some styling and icons: + +[source,html] +---- + + + OpenAPI +
    + + + Swagger UI +---- + +TIP: We use the Font Awesome Free icon set. + +Note how the paths are specified: `{config:http-path('quarkus.smallrye-openapi.path')}`. This is a special +directive that the quarkus dev console understands: it will replace that value with the resolved route +named 'quarkus.smallrye-openapi.path'. + +The corresponding non-application endpoint is declared using `.routeConfigKey` to associate the route with a name: + +[source,java] +---- + nonApplicationRootPathBuildItem.routeBuilder() + .route(openApiConfig.path) // <1> + .routeConfigKey("quarkus.smallrye-openapi.path") // <2> + ... + .build(); +---- +<1> The configured path is resolved into a valid route. +<2> The resolved route path is then associated with the key `quarkus.smallrye-openapi.path`. + +== Path considerations + +Paths are tricky business. Keep the following in mind: + +* Assume your UI will be nested under the dev endpoint. Do not provide a way to customize this without a strong reason. +* Never construct your own absolute paths. Adding a suffix to a known, normalized and resolved path is fine. + +Configured paths, like the `dev` endpoint used by the console or the SmallRye OpenAPI path shown in the example above, +need to be properly resolved against both `quarkus.http.root-path` and `quarkus.http.non-application-root-path`. +Use `NonApplicationRootPathBuildItem` or `HttpRootPathBuildItem` to construct endpoint routes and identify resolved +path values that can then be used in templates. + +The `{devRootAppend}` variable can also be used in templates to construct URLs for static dev console resources, for example: + +[source,html] +---- +Quarkus +---- + +Refer to the link:all-config#quarkus-vertx-http_quarkus.http.non-application-root-path[Quarkus Vertx HTTP configuration reference] +for details on how the non-application root path is configured. + +== Template and styling support + +Both the `embedded.html` files and any full page you add in `/dev-templates` will be interpreted by +link:qute[the Qute template engine]. + +This also means that you can link:qute-reference#user_tags[add custom Qute tags] in +`/dev-templates/tags` for your templates to use. + +The style system currently in use is https://getbootstrap.com/docs/4.6/getting-started/introduction/[Bootstrap V4 (4.6.0)] +but note that this might change in the future. + +The main template also includes https://jquery.com/[jQuery 3.5.1], but here again this might change. + +=== Accessing Config Properties + +A `config:property(name)` expression can be used to output the config value for the given property name. +The property name can be either a string literal or obtained dynamically by another expression. +For example `{config:property('quarkus.lambda.handler')}` and `{config:property(foo.propertyName)}`. + +Reminder: do not use this to retrieve raw configured path values. As shown above, use `{config:http-path(...)}` with +a known route configuration key when working with resource paths. 
+ +== Adding full pages + +To add full pages for your Dev UI extension such as this one: + +image::dev-ui-page.png[alt=Dev UI custom page] + +You need to place them in your extension's +link:building-my-first-extension#description-of-a-quarkus-extension[`deployment`] module's +`/dev-templates` resource folder, like this page for the link:cache[`quarkus-cache` extension]: + +[[action-example]] +[source,java] +---- +{#include main}// <1> + {#style}// <2> + .custom { + color: gray; + } + {/style} + {#script} // <3> + $(document).ready(function(){ + $(function () { + $('[data-toggle="tooltip"]').tooltip() + }); + }); + {/script} + {#title}Cache{/title}// <4> + {#body}// <5> + + + + + + + + + {#for cacheInfo in info:cacheInfos}// <6> + + + + + {/for} + +
    NameSize
    + {cacheInfo.name} + +
    + enctype="application/x-www-form-urlencoded"> + + + +
    +
    + {/body} +{/include} +---- +<1> In order to benefit from the same style as other Dev UI pages, extend the `main` template +<2> You can pass extra CSS for your page in the `style` template parameter +<3> You can pass extra JavaScript for your page in the `script` template parameter. This will be added inline after the JQuery script, so you can safely use JQuery in your script. +<4> Don't forget to set your page title in the `title` template parameter +<5> The `body` template parameter will contain your content +<6> In order for your template to read custom information from your Quarkus extension, you can use + the `info` link:qute-reference#namespace_extension_methods[namespace]. +<7> This shows an <> + +== Linking to your full-page templates + +Full-page templates for extensions live under a pre-defined `{devRootAppend}/{groupId}.{artifactId}/` directory +that is referenced using the `urlbase` template parameter. Using configuration defaults, that would resolve to +`/q/dev/io.quarkus.quarkus-cache/`, as an example. + +Use the `{urlbase}` template parameter to reference this folder in `embedded.html`: + +[source,html] +---- +// <1> + + Caches {info:cacheInfos.size()} +---- +<1> Use the `urlbase` template parameter to reference full-page templates for your extension + +== Passing information to your templates + +In `embedded.html` or in full-page templates, you will likely want to display information that is +available from your extension. + +There are two ways to make that information available, depending on whether it is available at +build time or at run time. + +In both cases we advise that you add support for the Dev UI in your `{pkg}.deployment.devconsole` +package in a `DevConsoleProcessor` class (in your extension's +link:building-my-first-extension#description-of-a-quarkus-extension[`deployment`] module). 
+ +=== Passing run-time information + +[source,java] +---- +package io.quarkus.cache.deployment.devconsole; + +import io.quarkus.cache.runtime.CaffeineCacheSupplier; +import io.quarkus.deployment.IsDevelopment; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.devconsole.spi.DevConsoleRuntimeTemplateInfoBuildItem; + +public class DevConsoleProcessor { + + @BuildStep(onlyIf = IsDevelopment.class)// <1> + public DevConsoleRuntimeTemplateInfoBuildItem collectBeanInfo() { + return new DevConsoleRuntimeTemplateInfoBuildItem("cacheInfos", + new CaffeineCacheSupplier());// <2> + } +} +---- +<1> Don't forget to make this link:building-my-first-extension#deploying-the-greeting-feature[build step] + conditional on being in dev mode +<2> Declare a run-time dev `info:cacheInfos` template value + +This will map the `info:cacheInfos` value to this supplier in your extension's +link:building-my-first-extension#description-of-a-quarkus-extension[`runtime module`]: + +[source,java] +---- +package io.quarkus.cache.runtime; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.function.Supplier; + +import io.quarkus.arc.Arc; +import io.quarkus.cache.runtime.caffeine.CaffeineCache; + +public class CaffeineCacheSupplier implements Supplier> { + + @Override + public List get() { + List allCaches = new ArrayList<>(allCaches()); + allCaches.sort(Comparator.comparing(CaffeineCache::getName)); + return allCaches; + } + + public static Collection allCaches() { + // Get it from ArC at run-time + return (Collection) (Collection) + Arc.container().instance(CacheManagerImpl.class).get().getAllCaches(); + } +} +---- + +=== Passing build-time information + +Sometimes you only need build-time information to be passed to your template, so you can do it like this: + +[source,java] +---- +package io.quarkus.qute.deployment.devconsole; + +import java.util.List; + +import io.quarkus.deployment.IsDevelopment; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.devconsole.spi.DevConsoleTemplateInfoBuildItem; +import io.quarkus.qute.deployment.CheckedTemplateBuildItem; +import io.quarkus.qute.deployment.TemplateVariantsBuildItem; + +public class DevConsoleProcessor { + + @BuildStep(onlyIf = IsDevelopment.class) + public DevConsoleTemplateInfoBuildItem collectBeanInfo( + List checkedTemplates,// <1> + TemplateVariantsBuildItem variants) { + DevQuteInfos quteInfos = new DevQuteInfos(); + for (CheckedTemplateBuildItem checkedTemplate : checkedTemplates) { + DevQuteTemplateInfo templateInfo = + new DevQuteTemplateInfo(checkedTemplate.templateId, + variants.getVariants().get(checkedTemplate.templateId), + checkedTemplate.bindings); + quteInfos.addQuteTemplateInfo(templateInfo); + } + return new DevConsoleTemplateInfoBuildItem("devQuteInfos", quteInfos);// <2> + } + +} +---- +<1> Use whatever dependencies you need as input +<2> Declare a build-time `info:devQuteInfos` DEV template value + +== Advanced usage: adding actions + +You can also add actions to your Dev UI templates: + +image::dev-ui-interactive.png[alt=Dev UI interactive page] + +This can be done by adding another link:building-my-first-extension#deploying-the-greeting-feature[build step] to +declare the action in your extension's +link:building-my-first-extension#description-of-a-quarkus-extension[`deployment`] module: + + +[source,java] +---- +package io.quarkus.cache.deployment.devconsole; + +import static 
io.quarkus.deployment.annotations.ExecutionTime.STATIC_INIT; + +import io.quarkus.cache.runtime.devconsole.CacheDevConsoleRecorder; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.devconsole.spi.DevConsoleRouteBuildItem; + +public class DevConsoleProcessor { + + @BuildStep + @Record(value = STATIC_INIT, optional = true)// <1> + DevConsoleRouteBuildItem invokeEndpoint(CacheDevConsoleRecorder recorder) { + return new DevConsoleRouteBuildItem("caches", "POST", + recorder.clearCacheHandler());// <2> + } +} +---- +<1> Mark the recorder as optional, so it will only be invoked when in dev mode +<2> Declare a `POST {urlbase}/caches` route handled by the given handler + + +Note: you can see <>. + +Now all you have to do is implement the recorder in your extension's +link:building-my-first-extension#description-of-a-quarkus-extension[`runtime module`]: + + +[source,java] +---- +package io.quarkus.cache.runtime.devconsole; + +import io.quarkus.cache.runtime.CaffeineCacheSupplier; +import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.devconsole.runtime.spi.DevConsolePostHandler; +import io.quarkus.vertx.http.runtime.devmode.devconsole.FlashScopeUtil.FlashMessageStatus; +import io.vertx.core.Handler; +import io.vertx.core.MultiMap; +import io.vertx.ext.web.RoutingContext; + +@Recorder +public class CacheDevConsoleRecorder { + + public Handler clearCacheHandler() { + return new DevConsolePostHandler() {// <1> + @Override + protected void handlePost(RoutingContext event, MultiMap form) // <2> + throws Exception { + String cacheName = form.get("name"); + for (CaffeineCache cache : CaffeineCacheSupplier.allCaches()) { + if (cache.getName().equals(cacheName)) { + cache.invalidateAll(); + flashMessage(event, "Cache for " + cacheName + " cleared");// <3> + return; + } + } + flashMessage(event, "Cache for " + cacheName + " not found", + FlashMessageStatus.ERROR);// <4> + } + }; + } +} +---- +<1> While you can use https://vertx.io/docs/vertx-web/java/#_routing_by_http_method[any Vert.x handler], + the `DevConsolePostHandler` superclass will handle your POST actions + nicely, and auto-redirect to the `GET` URI right after your `POST` for optimal behavior. +<2> You can get the Vert.x `RoutingContext` as well as the `form` contents +<3> Don't forget to add a message for the user to let them know everything went fine +<4> You can also add error messages + + +NOTE: Flash messages are handled by the `main` DEV template and will result in nice notifications for your +users: + +image::dev-ui-message.png[alt=Dev UI message] + diff --git a/docs/src/main/asciidoc/elasticsearch.adoc b/docs/src/main/asciidoc/elasticsearch.adoc index 0fa996c2f6f40..dbc4ae998597e 100644 --- a/docs/src/main/asciidoc/elasticsearch.adoc +++ b/docs/src/main/asciidoc/elasticsearch.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Connecting to an Elasticsearch cluster include::./attributes.adoc[] @@ -49,14 +49,14 @@ The solution for the high level client is located in the `elasticsearch-rest-hig First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=elasticsearch-quickstart \ -DclassName="org.acme.elasticsearch.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jackson,elasticsearch-rest-client" + -Dextensions="resteasy,resteasy-jackson,elasticsearch-rest-client" cd elasticsearch-quickstart ---- @@ -203,8 +203,6 @@ Now, edit the `org.acme.elasticsearch.FruitResource` class as follows: [source,java] ---- @Path("/fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { @Inject FruitService fruitService; @@ -262,6 +260,52 @@ quarkus.elasticsearch.hosts = localhost:9200 If you need a more advanced configuration, you can find the comprehensive list of supported configuration properties at the end of this guide. +== Programmatically Configuring Elasticsearch +On top of the parametric configuration, you can also programmatically apply additional configuration to the client by implementing a `RestClientBuilder.HttpClientConfigCallback` and annotating it with `ElasticsearchClientConfig`. You may provide multiple implementations and configuration provided by each implementation will be applied in a randomly ordered cascading manner. + +For example, when accessing an Elasticsearch cluster that is set up for TLS on the HTTP layer, the client needs to trust the certificate that Elasticsearch is using. The following is an example of setting up the client to trust the CA that has signed the certificate that Elasticsearch is using, when that CA certificate is available in a PKCS#12 keystore. + +[source,java] +---- +import io.quarkus.elasticsearch.restclient.lowlevel.ElasticsearchClientConfig; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.ssl.SSLContextBuilder; +import org.apache.http.ssl.SSLContexts; +import org.elasticsearch.client.RestClientBuilder; + +import javax.enterprise.context.Dependent; +import javax.net.ssl.SSLContext; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.KeyStore; + +@ElasticsearchClientConfig +@Dependent +public class SSLContextConfigurator implements RestClientBuilder.HttpClientConfigCallback { + @Override + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + try { + String keyStorePass = "password-for-keystore"; + Path trustStorePath = Paths.get("/path/to/truststore.p12"); + KeyStore truststore = KeyStore.getInstance("pkcs12"); + try (InputStream is = Files.newInputStream(trustStorePath)) { + truststore.load(is, keyStorePass.toCharArray()); + } + SSLContextBuilder sslBuilder = SSLContexts.custom() + .loadTrustMaterial(truststore, null); + SSLContext sslContext = sslBuilder.build(); + httpClientBuilder.setSSLContext(sslContext); + } catch (Exception e) { + throw new RuntimeException(e); + } + + return httpClientBuilder; + } +} +---- +See https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/_encrypted_communication.html[Elasticsearch documentation] for more details on this particular example. 
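Because multiple `RestClientBuilder.HttpClientConfigCallback` implementations may be provided, unrelated concerns can live in separate beans. The following is a minimal sketch, not part of the guide's quickstart, that adds a default header to every request; the header value is a placeholder and whether your cluster expects such a header is an assumption:

[source,java]
----
import io.quarkus.elasticsearch.restclient.lowlevel.ElasticsearchClientConfig;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.RestClientBuilder;

import javax.enterprise.context.Dependent;
import java.util.Collections;

@ElasticsearchClientConfig
@Dependent
public class HeaderConfigurator implements RestClientBuilder.HttpClientConfigCallback {
    @Override
    public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
        // "changeme" is a placeholder credential used only for this sketch
        return httpClientBuilder.setDefaultHeaders(
                Collections.singletonList(new BasicHeader("Authorization", "ApiKey changeme")));
    }
}
----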
== Running an Elasticsearch cluster @@ -270,7 +314,7 @@ if you have a local running instance on this port, there is nothing more to do b If you want to use Docker to run an Elasticsearch instance, you can use the following command to launch one: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- docker run --name elasticsearch -e "discovery.type=single-node" -e "ES_JAVA_OPTS=-Xms512m -Xmx512m"\ --rm -p 9200:9200 docker.elastic.co/elasticsearch/elasticsearch-oss:{elasticsearch-version} @@ -382,18 +426,18 @@ In this example you can note the following: == Hibernate Search Elasticsearch -Quarkus supports Hibernate Search with Elasticsearch via the `hibernate-search-elasticsearch` extension. +Quarkus supports Hibernate Search with Elasticsearch via the `hibernate-search-orm-elasticsearch` extension. Hibernate Search Elasticsearch allows to synchronize your JPA entities to an Elasticsearch cluster and offers a way to query your Elasticsearch cluster using the Hibernate Search API. -If you're interested in it, you can read the link:hibernate-search-elasticsearch[Hibernate Search with Elasticsearch guide]. +If you're interested in it, you can read the link:hibernate-search-orm-elasticsearch[Hibernate Search with Elasticsearch guide]. == Cluster Health Check If you are using the `quarkus-smallrye-health` extension, both the extension will automatically add a readiness health check to validate the health of the cluster. -So when you access the `/health/ready` endpoint of your application you will have information about the cluster status. +So when you access the `/q/health/ready` endpoint of your application you will have information about the cluster status. It uses the cluster health endpoint, the check will be down if the status of the cluster is **red**, or the cluster is not available. This behavior can be disabled by setting the `quarkus.elasticsearch.health.enabled` property to `false` in your `application.properties`. @@ -404,7 +448,7 @@ You can use both clients in a native executable. You can build a native executable with the usual command `./mvnw package -Pnative`. -Running it is as simple as executing `./target/elasticsearch-low-level-client-quickstart-1.0-SNAPSHOT-runner`. +Running it is as simple as executing `./target/elasticsearch-low-level-client-quickstart-1.0.0-SNAPSHOT-runner`. You can then point your browser to `http://localhost:8080/fruits.html` and use your application. diff --git a/docs/src/main/asciidoc/flyway.adoc b/docs/src/main/asciidoc/flyway.adoc index da3f622b72de8..be18403db2377 100644 --- a/docs/src/main/asciidoc/flyway.adoc +++ b/docs/src/main/asciidoc/flyway.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Flyway @@ -27,7 +27,7 @@ In your `pom.xml`, add the following dependencies: * your JDBC driver extension (`quarkus-jdbc-postgresql`, `quarkus-jdbc-h2`, `quarkus-jdbc-mariadb`, ...) [source,xml] --- +---- @@ -41,7 +41,7 @@ In your `pom.xml`, add the following dependencies: quarkus-jdbc-postgresql --- +---- Flyway support relies on the Quarkus datasource config. It can be customized for the default datasource as well as for every <>. 
@@ -55,7 +55,7 @@ include::{generated-dir}/config/quarkus-flyway.adoc[opts=optional, leveloffset=+ The following is an example for the `{config-file}` file: [source,properties] --- +---- # configure your datasource quarkus.datasource.db-kind=postgresql quarkus.datasource.username=sarah @@ -75,13 +75,12 @@ quarkus.flyway.migrate-at-start=true # quarkus.flyway.locations=db/location1,db/location2 # quarkus.flyway.sql-migration-prefix=X # quarkus.flyway.repeatable-sql-migration-prefix=K - --- +---- Add a SQL migration to the default folder following the Flyway naming conventions: `{migrations-path}/V1.0.0__Quarkus.sql` [source,sql] --- +---- CREATE TABLE quarkus ( id INT, @@ -89,12 +88,12 @@ CREATE TABLE quarkus ); INSERT INTO quarkus(id, name) VALUES (1, 'QUARKED'); --- +---- Now you can start your application and Quarkus will run the Flyway's migrate method according to your config: [source,java] --- +---- @ApplicationScoped public class MigrationService { // You can Inject the object if you want to use it manually @@ -106,7 +105,7 @@ public class MigrationService { System.out.println(flyway.info().current().getVersion().toString()); } } --- +---- <1> Inject the Flyway object if you want to use it directly @@ -116,23 +115,20 @@ Flyway can be configured for multiple datasources. The Flyway properties are prefixed exactly the same way as the named datasources, for example: [source,properties] --- +---- quarkus.datasource.db-kind=h2 quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 quarkus.datasource.jdbc.max-size=13 quarkus.datasource.users.db-kind=h2 quarkus.datasource.users.username=username1 quarkus.datasource.users.jdbc.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.jdbc.min-size=1 quarkus.datasource.users.jdbc.max-size=11 quarkus.datasource.inventory.db-kind=h2 quarkus.datasource.inventory.username=username2 quarkus.datasource.inventory.jdbc.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.jdbc.min-size=2 quarkus.datasource.inventory.jdbc.max-size=12 # Flyway configuration for the default datasource @@ -149,7 +145,7 @@ quarkus.flyway.users.migrate-at-start=true quarkus.flyway.inventory.schemas=INVENTORY_TEST_SCHEMA quarkus.flyway.inventory.locations=db/inventory/location1,db/inventory/location2 quarkus.flyway.inventory.migrate-at-start=true --- +---- Notice there's an extra bit in the key. The syntax is as follows: `quarkus.flyway.[optional name.][datasource property]`. 
@@ -164,7 +160,7 @@ NOTE: If you enabled the `quarkus.flyway.migrate-at-start` property, by the time Quarkus will already have run the migrate operation [source,java] --- +---- @ApplicationScoped public class MigrationService { // You can Inject the object if you want to use it manually @@ -187,7 +183,7 @@ public class MigrationService { System.out.println(flyway.info().current().getVersion().toString()); } } --- +---- <1> Inject the Flyway object if you want to use it directly <2> Inject Flyway for named datasources using the Quarkus `FlywayDataSource` qualifier diff --git a/docs/src/main/asciidoc/funqy-amazon-lambda-http.adoc b/docs/src/main/asciidoc/funqy-amazon-lambda-http.adoc index 5492fb09481f2..8258479ed3c54 100644 --- a/docs/src/main/asciidoc/funqy-amazon-lambda-http.adoc +++ b/docs/src/main/asciidoc/funqy-amazon-lambda-http.adoc @@ -1,9 +1,9 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// -= Quarkus - Funqy HTTP with Amazon Lambda  += Quarkus - Funqy HTTP Binding with Amazon Lambda  :extension-status: preview include::./attributes.adoc[] diff --git a/docs/src/main/asciidoc/funqy-amazon-lambda.adoc b/docs/src/main/asciidoc/funqy-amazon-lambda.adoc index ab2f312e85105..3f003d2bf9087 100644 --- a/docs/src/main/asciidoc/funqy-amazon-lambda.adoc +++ b/docs/src/main/asciidoc/funqy-amazon-lambda.adoc @@ -1,9 +1,9 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// -= Quarkus - Funqy Amazon Lambdas += Quarkus - Funqy Amazon Lambda Binding :extension-status: preview include::./attributes.adoc[] @@ -51,7 +51,7 @@ environments and AWS Lambda is one of them. The Java code is actually the same Only one Funqy function can be exported per Amazon Lambda deployment. If you have multiple functions defined within your project, then you will need to choose the function within your Quarkus `application.properties`: -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- quarkus.funqy.export=greet ---- @@ -70,7 +70,7 @@ at build time. Build the project using maven. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package ---- @@ -84,7 +84,7 @@ a lambda with AWS CLI. Specifically, make sure you have created an `Execution R a `LAMBDA_ROLE_ARN` environment variable in your profile or console window, Alternatively, you can edit the `manage.sh` script that is generated by the build and put the role value directly there: -[source] +[source,bash] ---- LAMBDA_ROLE_ARN="arn:aws:iam::1234567890:role/lambda-role" ---- @@ -113,21 +113,21 @@ and setup the `LAMBDA_ROLE_ARN` environment variable (as described above), pleas A usage statement will be printed to guide you accordingly. 
To see the `usage` statement, and validate AWS configuration: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh ---- You can `create` your function using the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh create ---- or if you do not have `LAMBDA_ROLE_ARN` already defined in this shell: -[source] +[source,bash] ---- LAMBDA_ROLE_ARN="arn:aws:iam::1234567890:role/lambda-role" sh target/manage.sh create ---- @@ -138,13 +138,13 @@ This special handler is Funqy's integration point with AWS Lambda. If there are any problems creating the function, you must delete it with the `delete` function before re-running the `create` command. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh delete ---- Commands may also be stacked: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh delete create ---- @@ -153,7 +153,7 @@ sh target/manage.sh delete create Use the `invoke` command to invoke your function. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh invoke ---- @@ -163,14 +163,14 @@ in the root directory of the project. The function can also be invoked locally with the SAM CLI like this: -[source] +[source,bash] ---- sam local invoke --template target/sam.jvm.yaml --event payload.json ---- If you are working with your native image build, simply replace the template name with the native version: -[source] +[source,bash] ---- sam local invoke --template target/sam.native.yaml --event payload.json ---- @@ -180,7 +180,7 @@ sam local invoke --template target/sam.native.yaml --event payload.json You can update the Java code as you see fit. Once you've rebuilt, you can redeploy your function by executing the `update` command. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh update ---- @@ -192,7 +192,7 @@ code to a native executable. Just make sure to rebuild your project with the `- For Linux hosts execute: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn package -Pnative ---- @@ -201,7 +201,7 @@ NOTE: If you are building on a non-Linux system, you will need to also pass in a Lambda requires linux binaries. You can do this by passing this property to your Maven build: `-Dnative-image.docker-build=true`, or for Gradle: `--docker-build=true`. This requires you to have docker installed locally, however. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean install -Pnative -Dnative-image.docker-build=true ---- @@ -213,7 +213,7 @@ Custom (Provided) Runtime. The instructions here are exactly as above with one change: you'll need to add `native` as the first parameter to the `manage.sh` script: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- sh target/manage.sh native create ---- @@ -228,7 +228,7 @@ to create, delete, and update your functions. One thing to note about the create command for native is that the `aws lambda create-function` call must set a specific environment variable: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- --environment 'Variables={DISABLE_SIGNAL_HANDLERS=true}' ---- @@ -279,21 +279,21 @@ A starter template has been generated for both JVM and native execution modes. Run the following SAM CLI command to locally test your function, passing the appropriate SAM `template`. The `event` parameter takes any JSON file, in this case the sample `payload.json`. 
-[source] +[source,bash] ---- sam local invoke --template target/sam.jvm.yaml --event payload.json ---- The native image can also be locally tested using the `sam.native.yaml` template: -[source] +[source,bash] ---- sam local invoke --template target/sam.native.yaml --event payload.json ---- == Modifying `function.zip` -The are times where you may have to add additional entries to the `function.zip` lambda deployment that is generated +There are times where you may have to add additional entries to the `function.zip` lambda deployment that is generated by the build. To do this create a `zip.jvm` or `zip.native` directory within `src/main`. Create `zip.jvm/` if you are doing a pure Java. `zip.native/` if you are doing a native deployment. diff --git a/docs/src/main/asciidoc/funqy-azure-functions-http.adoc b/docs/src/main/asciidoc/funqy-azure-functions-http.adoc index 0f7124745a6fa..2952aed2afabc 100644 --- a/docs/src/main/asciidoc/funqy-azure-functions-http.adoc +++ b/docs/src/main/asciidoc/funqy-azure-functions-http.adoc @@ -1,9 +1,9 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// -= Quarkus - Funqy Azure Functions += Quarkus - Funqy HTTP Binding with Azure Functions :extension-status: preview include::./attributes.adoc[] diff --git a/docs/src/main/asciidoc/funqy-gcp-functions-http.adoc b/docs/src/main/asciidoc/funqy-gcp-functions-http.adoc new file mode 100644 index 0000000000000..1b9c89c4cb3e7 --- /dev/null +++ b/docs/src/main/asciidoc/funqy-gcp-functions-http.adoc @@ -0,0 +1,55 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Funqy HTTP Binding with Google Cloud Functions +:extension-status: experimental + +include::./attributes.adoc[] + +If you want to allow HTTP clients to invoke your Funqy functions on Google Cloud Functions, Quarkus allows you to expose multiple +Funqy functions through HTTP deployed as one Google Cloud Function. This approach does add overhead over the +regular Funqy Google Cloud Function integration. + +include::./status-include.adoc[] + +Follow the link:gcp-functions-http[Google Cloud Functions Http Guide]. It walks through using a variety of HTTP +frameworks on Google Cloud Functions, including Funqy. + +WARNING: The Funqy HTTP + Google Cloud Functions binding is not a replacement for REST over HTTP. Because Funqy +needs to be portable across a lot of different protocols and function providers its HTTP binding +is very minimalistic and you will lose REST features like linking and the ability to leverage +HTTP features like cache-control and conditional GETs. You may want to consider using Quarkus's +JAX-RS, Spring MVC, or Vert.x Web Reactive Route link:gcp-functions-http[support] instead. They also work with Quarkus and Google Cloud Functions. + +== An additional Quickstart + +Beyond generating a Google Cloud Functions project that is covered in the link:gcp-functions-http[Google Cloud Functions HTTP Guide], +there's also a quickstart for running Funqy HTTP on Google Cloud Functions. + +Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. 
+ +The solution is located in the `funqy-google-cloud-functions-http-quickstart` {quickstarts-tree-url}/funqy-quickstarts/funqy-google-cloud-functions-http-quickstart[directory]. + +== The Code + +There is nothing special about the code and more importantly nothing Google Cloud specific. Funqy functions can be deployed to many different +environments and Google Cloud Functions is one of them. The Java code is actually the same exact code as the {quickstarts-tree-url}/funqy-quickstarts/funqy-http-quickstart[funqy-http-quickstart]. + +== Getting Started + +The steps to get this quickstart running are exactly the same as defined in the link:gcp-functions-http[Google Cloud Functions HTTP Guide]. +This differences are that you are running from a quickstart and the Maven dependencies are slightly different. + +[source, xml] +---- + + io.quarkus + quarkus-funqy-http + + + io.quarkus + quarkus-google-cloud-functions-http + +---- diff --git a/docs/src/main/asciidoc/funqy-gcp-functions.adoc b/docs/src/main/asciidoc/funqy-gcp-functions.adoc index 50df2cb7fec52..7f9bd88483fed 100644 --- a/docs/src/main/asciidoc/funqy-gcp-functions.adoc +++ b/docs/src/main/asciidoc/funqy-gcp-functions.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Funqy Google Cloud Functions :extension-status: experimental @@ -28,18 +28,46 @@ To complete this guide, you need: Login to Google Cloud is necessary for deploying the application and it can be done as follows: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- gcloud auth login ---- At the time of this writing, Cloud Functions are still in beta so make sure to install the `beta` command group. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- gcloud components install beta ---- +== The Quickstart + +Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. + +The solution is located in the `funqy-google-cloud-functions-quickstart` {quickstarts-tree-url}/funqy-quickstarts/funqy-google-cloud-functions-quickstart[directory]. + +== Creating the Maven Deployment Project + +Create an application with the `quarkus-funqy-google-cloud-functions` extension. +You can use the following Maven command to create it: + +[source,bash,subs=attributes+] +---- +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ + -DprojectGroupId=org.acme \ + -DprojectArtifactId=funqy-google-cloud-functions \ + -DclassName="org.acme.quickstart.GreetingResource" \ + -Dpath="/hello" \ + -Dextensions="resteasy,funqy-google-cloud-functions" +---- + +Now, let's remove what's not needed inside the generated application: + +- Remove the dependency `io.quarkus:quarkus-reasteasy` from your `pom.xml` file. +- Remove the generated `org.acme.quickstart.GreetingResource` class. +- Remove the `index.html` from `resources/META-INF/resources` or it will be picked up instead of your Function. +- Remove the existing tests. + == The Code There is nothing special about the code and more importantly nothing Google Cloud specific. Funqy functions can be deployed to many different @@ -52,7 +80,7 @@ Only one Funqy function can be exported per Google Cloud Functions deployment. annotated with `@Funq` in your project, then there is no worries. 
If you have multiple functions defined within your project, then you will need to choose the function within your Quarkus `application.properties`: -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- quarkus.funqy.export=greet ---- @@ -63,7 +91,7 @@ Alternatively, you can set the `QUARKUS_FUNQY_EXPORT` environment variable when Build the project using maven. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package ---- @@ -86,16 +114,15 @@ import io.quarkus.funqy.gcp.functions.event.StorageEvent; public class GreetingFunctions { - @Inject - GreetingService service; + @Inject GreetingService service; // <1> - @Funq // <1> + @Funq // <2> public void helloPubSubWorld(PubsubMessage pubSubEvent) { - String message = service.hello("world"); + String message = service.hello(pubSubEvent.data); System.out.println(pubSubEvent.messageId + " - " + message); } - @Funq // <2> + @Funq // <3> public void helloGCSWorld(StorageEvent storageEvent) { String message = service.hello("world"); System.out.println(storageEvent.name + " - " + message); @@ -106,9 +133,10 @@ public class GreetingFunctions { NOTE: Function return type can also be Mutiny reactive types. -1. This is a background function that takes as parameter a `io.quarkus.funqy.gcp.functions.event.PubsubMessage`, +1. Injection works inside your function. +2. This is a background function that takes as parameter a `io.quarkus.funqy.gcp.functions.event.PubsubMessage`, this is a convenient class to deserialize a PubSub message. -2. This is a background function that takes as parameter a `io.quarkus.funqy.gcp.functions.event.StorageEvent`, +3. This is a background function that takes as parameter a `io.quarkus.funqy.gcp.functions.event.StorageEvent`, this is a convenient class to deserialize a Google Storage event. NOTE: we provide convenience class to deserialize common Google Cloud event inside the `io.quarkus.funqy.gcp.functions.event` package. @@ -118,7 +146,7 @@ As our project contains multiple function, we need to specify which function nee [source,property] ---- -quarkus.funqy.export=helloHttpWorld +quarkus.funqy.export=helloPubSubWorld ---- == Build and Deploy to Google Cloud @@ -131,9 +159,11 @@ Then you will be able to use `gcloud` to deploy your function to Google Cloud, t [WARNING] ==== The first time you launch the `gcloud beta functions deploy`, you can have the following error message: -``` + +[source] +---- ERROR: (gcloud.beta.functions.deploy) OperationError: code=7, message=Build Failed: Cloud Build has not been used in project before or it is disabled. Enable it by visiting https://console.developers.google.com/apis/api/cloudbuild.googleapis.com/overview?project= then retry. -``` +---- This means that Cloud Build is not activated yet. To overcome this error, open the URL shown in the error, follow the instructions and then wait a few minutes before retrying the command. ==== @@ -141,7 +171,7 @@ This means that Cloud Build is not activated yet. 
To overcome this error, open t Use this command to deploy to Google Cloud Functions: -[source] +[source,bash] ---- gcloud beta functions deploy quarkus-example-funky-pubsub \ --entry-point=io.quarkus.funqy.gcp.functions.FunqyBackgroundFunction \ @@ -157,9 +187,9 @@ define that this function will be triggered by all message publication inside th To trigger an event to this function, you can use the `gcloud functions call` command: -[source] +[source,bash] ---- -gcloud functions call quarkus-example-funky-pubsub --data '{"data":"Hello, Pub/Sub"}' +gcloud functions call quarkus-example-funky-pubsub --data '{"data":"Pub/Sub"}' ---- The `--data '{"data":"Hello, Pub/Sub"}'` option allow to specify the message to be send to PubSub. @@ -168,14 +198,14 @@ The `--data '{"data":"Hello, Pub/Sub"}'` option allow to specify the message to Before deploying your function, you need to create a bucket. -[source] +[source,bash] ---- gsutil mb gs://quarkus-hello ---- Then, use this command to deploy to Google Cloud Functions: -[source] +[source,bash] ---- gcloud beta functions deploy quarkus-example-funky-storage \ --entry-point=io.quarkus.funqy.gcp.functions.FunqyBackgroundFunction \ @@ -191,9 +221,9 @@ define that this function will be triggered by all new file inside this bucket. To trigger an event to this function, you can use the `gcloud functions call` command: -[source] +[source,bash] ---- -gcloud functions call quarkus-example-funky-pubsub --data '{"name":"test.txt"}' +gcloud functions call quarkus-example-funky-storage --data '{"name":"test.txt"}' ---- The `--data '{"name":"test.txt"}'` option allow to specify a fake file name, a fake Cloud Storage event will be created for this name. @@ -206,31 +236,35 @@ The easiest way to locally test your function is using the Cloud Function invoke You can download it via Maven using the following command: -[source] +[source,bash] ---- mvn dependency:copy \ -Dartifact='com.google.cloud.functions.invoker:java-function-invoker:1.0.0-beta1' \ -DoutputDirectory=. ---- +Before using the invoker, you first need to build your function via `mvn package`. + Then you can use it to launch your function locally, again, the command depends on the type of function and the type of events. === Background Functions - PubSub For background functions, you launch the invoker with a target class of `io.quarkus.funqy.gcp.functions.FunqyBackgroundFunction`. -[source] +[source,bash] ---- java -jar java-function-invoker-1.0.0-beta1.jar \ - --classpath target/funqy-google-cloud-functions-1.0-SNAPSHOT-runner.jar \ + --classpath target/funqy-google-cloud-functions-1.0.0-SNAPSHOT-runner.jar \ --target io.quarkus.funqy.gcp.functions.FunqyBackgroundFunction ---- +IMPORTANT: The `--classpath` parameter needs to be set to the previously packaged JAR that contains your function class and all Quarkus related classes. + Then you can call your background function via an HTTP call with a payload containing the event: -[source] +[source,bash] ---- -curl localhost:8080 -d '{"data":{"data":"hello"}}' +curl localhost:8080 -d '{"data":{"data":"world"}}' ---- This will call your PubSub background function with a PubSubMessage `{"data":"hello"}`. @@ -239,30 +273,24 @@ This will call your PubSub background function with a PubSubMessage `{"data":"he For background functions, you launch the invoker with a target class of `io.quarkus.funqy.gcp.functions.FunqyBackgroundFunction`. 
-[source] +[source,bash] ---- java -jar java-function-invoker-1.0.0-beta1.jar \ - --classpath target/funqy-google-cloud-functions-1.0-SNAPSHOT-runner.jar \ + --classpath target/funqy-google-cloud-functions-1.0.0-SNAPSHOT-runner.jar \ --target io.quarkus.funqy.gcp.functions.FunqyBackgroundFunction ---- +IMPORTANT: The `--classpath` parameter needs to be set to the previously packaged JAR that contains your function class and all Quarkus related classes. + Then you can call your background function via an HTTP call with a payload containing the event: -[source] +[source,bash] ---- curl localhost:8080 -d '{"data":{"name":"text"}}' ---- This will call your PubSub background function with a Cloud Storage event `{"name":"file.txt"}`, so an event on the `file.txt` file. - -== Deploying HTTP Functions via Funqy - -You can use link:funqy-http[Funqy HTTP] on Google Cloud Functions. -This allows you to invoke on multiple Funqy functions using HTTP deployed as one Google Cloud Function. - -For this you need to include both `quarkus-funqy-http` and `quarkus-google-cloud-functions` extension. - == What's next? If you are looking for JAX-RS, Servlet or Vert.x support for Google Cloud Functions, we have it thanks to our link:gcp-functions-http[Google Cloud Functions HTTP binding]. diff --git a/docs/src/main/asciidoc/funqy-http.adoc b/docs/src/main/asciidoc/funqy-http.adoc index 80ec9123a566a..008d04499e9d5 100644 --- a/docs/src/main/asciidoc/funqy-http.adoc +++ b/docs/src/main/asciidoc/funqy-http.adoc @@ -1,9 +1,9 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// -= Quarkus - Funqy HTTP (Standalone) += Quarkus - Funqy HTTP Binding (Standalone) include::./attributes.adoc[] :extension-status: preview @@ -39,9 +39,21 @@ The solution is located in the `funqy-http-quickstart` {quickstarts-tree-url}/fu If you look at the Java code, you'll see that there is no HTTP specific API. Its just simple Java methods annotated with `@Funq`. Simple, easy, straightforward. +== Maven Dependencies + +To write Funqy HTTP functions, simply include the `quarkus-funqy-http` dependency into your Quarkus `pom.xml` file: + +[source, xml] +---- + + io.quarkus + quarkus-funqy-http + +---- + == Build Project -[source, shell] +[source,bash] ---- mvn clean quarkus:dev ---- @@ -50,15 +62,13 @@ This starts your functions in Quarkus dev mode. == Execute Funqy HTTP functions -Funqy HTTP input and output is always JSON. The Jackson JSON parser is used behind the scenes and you can -use Jackson annotations to fine tune your JSON mappings for your input and output objects. - -The HTTP POST method can be used to execute any Funqy function. HTTP GET can be used if your function does -not have any input. No other HTTP method can be used besides POST and GET. - The URL path to execute a function is the function name. For example if your function name is `foo` then the URL path to execute the function would be `/foo`. +The HTTP POST or GET methods can be used to invoke on a function. The return value of the function +is marshalled to JSON using the Jackson JSON library. Jackson annotations can be used. If your function +has an input parameter, a POST invocation must use JSON as the input type. Jackson is also used here for unmarshalling. 
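As a small illustration of the marshalling described above (a hedged sketch: the class and field names below are invented for this example and are not part of the quickstart), a function can accept a bean whose JSON mapping is tuned with a Jackson annotation:

[source,java]
----
import com.fasterxml.jackson.annotation.JsonProperty;
import io.quarkus.funqy.Funq;

public class EchoFunction {

    public static class Message {
        // Maps the JSON field "msg" to this property during unmarshalling.
        @JsonProperty("msg")
        public String text;
    }

    @Funq
    public String echo(Message message) {
        // The input JSON has already been unmarshalled by Jackson when the function is invoked.
        return "echo: " + message.text;
    }
}
----

Such a function would be invoked with an HTTP POST to `/echo` carrying a JSON body like `{"msg":"hello"}`.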
+ You can invoke the `hello` function defined in {quickstarts-tree-url}/funqy-quickstarts/funqy-http-quickstart/src/main/java/org/acme/funqy/PrimitiveFunctions.java[PrimitiveFunctions.java] by pointing your browser to http://localhost:8080/hello @@ -66,7 +76,7 @@ Invoking the other functions in the quickstart requires an HTTP POST. To execute the `greet` function defined in {quickstarts-tree-url}/funqy-quickstarts/funqy-http-quickstart/src/main/java/org/acme/funqy/GreetingFunction.java[GreetingFunction.java] invoke this curl script. -[source, shell] +[source,bash] ---- curl "http://localhost:8080/greet" \ -X POST \ @@ -78,7 +88,7 @@ Primitive types can also be passed as input using the standard JSON mapping for To execute the `toLowerCase` function defined in {quickstarts-tree-url}/funqy-quickstarts/funqy-http-quickstart/src/main/java/org/acme/funqy/PrimitiveFunctions.java[PrimitiveFunctions.java] invoke this curl script: -[source, shell] +[source,bash] ---- curl "http://localhost:8080/toLowerCase" \ -X POST \ @@ -89,7 +99,7 @@ curl "http://localhost:8080/toLowerCase" \ To execute the `double` function defined in {quickstarts-tree-url}/funqy-quickstarts/funqy-http-quickstart/src/main/java/org/acme/funqy/PrimitiveFunctions.java[PrimitiveFunctions.java] invoke this curl script: -[source, shell] +[source,bash] ---- curl "http://localhost:8080/double" \ -X POST \ @@ -97,15 +107,198 @@ curl "http://localhost:8080/double" \ -d '2' ---- -== Maven Dependencies +== GET Query Parameter Mapping -To write Funqy HTTP functions, simply include the `quarkus-funqy-http` dependency into your Quarkus `pom.xml` file: +For GET requests, the Funqy HTTP Binding also has a query parameter mapping for function input parameters. +Only bean style classes and `java.util.Map` can be used for your input parameter. For bean style +classes, query parameter names are mapped to properties on the bean class. Here's an example of a simple +`Map`: -[source, xml] +[source, java] ---- - - io.quarkus - quarkus-funqy-http - +@Funq +public String hello(Map map) { +... +} +---- + +Key values must be a primitive type (except char) or `String`. Values can be primitives (except char), `String`, `OffsetDateTime` or a complex +bean style class. For the above example, here's the corresponding curl request: + +[source,bash] +---- +curl "http://localhost:8080/a=1&b=2" +---- + +The `map` input parameter of the `hello` function would have the key value pairs: `a`->1, `b`->2. + +Bean style classes can also be use as the input parameter type. Here's an example: + +[source, java] +---- +public class Person { + String first; + String last; + + public String getFirst() { return first; } + public void setFirst(String first) { this.first = first; } + public String getLast() { return last; } + public void setLast(String last) { this.last = last; } +} + +public class MyFunctions { + @Funq + public String greet(Person p) { + return "Hello " + p.getFirst() + " " + p.getLast(); + } +} +---- + +Property values can be any primitive type except `char`. It can also be `String`, and `OffsetDateTime`. +`OffsetDateTime` query param values must be in ISO-8601 format. + +You can invoke on this using an HTTP GET and query parameters: + +[source,bash] +---- +curl "http://localhost:8080/greet?first=Bill&last=Burke" +---- + +In the above request, the query parameter names are mapped to corresponding properties in the input class. + +The input class can also have nested bean classes. 
Expanding on the previous example: + +[source, java] +---- +public class Family { + private Person dad; + private Person mom; + + public Person getDad() { return dad; } + public void setDad(Person dad) { this.dad = dad; } + public Person getMom() { return mom; } + public void setMom(Person mom) { this.mom = mom; } +} + +public class MyFunctions { + @Funq + public String greet(Family family) { + ... + } +} + ---- +In this case, query parameters for nested values use the `.` notation. For example: + +[source,bash] +---- +curl "http://localhost:8080/greet?dad.first=John&dad.last=Smith&mom.first=Martha&mom.last=Smith" +---- + +`java.util.List` and `Set` are also supported as property values. For example: + +[source, java] +---- +public class Family { + ... + + List pets; +} + +public class MyFunctions { + @Funq + public String greet(Family family) { + ... + } +} + +---- + +To invoke a GET request, just list the `pets` query parameter multiple times. + +[source,bash] +---- +curl "http://localhost:8080/greet?pets=itchy&pets=scratchy" +---- + +For more complex types, `List` and `Set` members must have an identifier in the +query parameter. For example: + +[source, java] +---- +public class Family { + ... + + List kids; +} + +public class MyFunctions { + @Funq + public String greet(Family family) { + ... + } +} + +---- + +Each `kids` query parameter must identify the kid they are referencing so that +the runtime can figure out which +property values go to which members in the list. Here's the curl request: + +[source,bash] +---- +curl "http://localhost:8080/greet?kids.1.first=Buffy&kids.2.first=Charlie" +---- + +The above URL uses the value `1` and `2` to identity the target member of the list, but any unique +string can be used. + +A property can also be a `java.util.Map`. The key of the map can be any primitive type and `String`. +For example: + +[source, java] +---- +public class Family { + ... + + Map address; +} + +public class MyFunctions { + @Funq + public String greet(Family family) { + ... + } +} +---- + +The corresponding call would look like this: + +[source,bash] +---- +curl "http://localhost:8080/greet?address.state=MA&address.city=Boston" +---- + +If your `Map` value is a complex type, then just continue the notation by adding the property to set at the end. + +[source, java] +---- +public class Family { + ... + + Map addresses; +} + +public class MyFunctions { + @Funq + public String greet(Family family) { + ... + } +} +---- + +[source,bash] +---- +curl "http://localhost:8080/greet?addresses.home.state=MA&addresses.home.city=Boston" +---- diff --git a/docs/src/main/asciidoc/funqy-knative-events.adoc b/docs/src/main/asciidoc/funqy-knative-events.adoc index 48529f1e44079..f33d010f6faa1 100644 --- a/docs/src/main/asciidoc/funqy-knative-events.adoc +++ b/docs/src/main/asciidoc/funqy-knative-events.adoc @@ -1,9 +1,9 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// -= Quarkus - Funqy Knative Events += Quarkus - Funqy Knative Events Binding include::./attributes.adoc[] :extension-status: preview @@ -28,7 +28,7 @@ To complete this guide, you need: Setting up Knative locally in a Minikube environment is beyond the scope of this guide. 
It is advised to follow https://redhat-developer-demos.github.io/knative-tutorial/knative-tutorial/index.html[this] Knative Tutorial -put together by Red Hat. It walks through how to set up Knative on Minikube or Openshift in a local environment. +put together by Red Hat. It walks through how to set up Knative on Minikube or OpenShift in a local environment. NOTE: Specifically you should run the link:https://redhat-developer-demos.github.io/knative-tutorial/knative-tutorial-eventing/eventing-trigger-broker.html[Brokers and Triggers] tutorial as this guide requires that you can invoke on a Broker to trigger the quickstart code. @@ -107,7 +107,7 @@ public class SimpleFunctionChain { The `configChain` function has its Cloud Event mapping changed by configuration within {quickstarts-tree-url}/funqy-quickstarts/funqy-knative-events-quickstart/src/main/resources/application.properties[application.properties]. -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- quarkus.funqy.knative-events.mapping.configChain.trigger=defaultChain.output quarkus.funqy.knative-events.mapping.configChain.response-type=annotated @@ -207,14 +207,14 @@ Funqy also works with Quarkus Dev mode! First build the Java artifacts: -[source, shell] +[source,bash] ---- mvn clean install ---- Next, a docker image is required by Knative, so you'll need to build that next: -[source, shell] +[source,bash] ---- docker build -f src/main/docker/Dockerfile.jvm -t yourAccountName/funqy-knative-events-quickstart . ---- @@ -224,17 +224,29 @@ Dockerfile is a standard Quarkus dockerfile. No special Knative magic. Push your image to docker hub or quay -[source, shell] +[source,bash] ---- docker push yourAccountName/funqy-knative-events-quickstart ---- Again, make sure to replace `yourAccountName` with your docker or quay account name when you run `docker push`. -== Deploy to Kubernetes/Openshift +== Deploy to Kubernetes/OpenShift + +The first step is to set up the broker in our namespace. +Following is an example command from the the Knative cli. + +[source, yaml] +---- +kn broker create default \ + --namespace knativetutorial +---- + +The broker we have created is called `default`, this broker will receive the cloud events. +The broker is also referenced in the function YAML files. -The first step is to define a Kubernetes/Openshift service to points to your the docker image you created and pushed -during build. Take a look at {quickstarts-tree-url}/funqy-quickstarts/funqy-knative-events-quickstart/src/main/k8s/funqy-service.yaml[funqy-service.yaml] +The second step is to define a Kubernetes/OpenShift service to point to the Docker image you created and pushed +during build. Take a look at {quickstarts-tree-url}/funqy-quickstarts/funqy-knative-events-quickstart/src/main/k8s/funqy-service.yaml[funqy-service.yaml] [source, yaml] ---- @@ -253,22 +265,22 @@ spec: - image: docker.io/yourAccountName/funqy-knative-events-quickstart ---- -This is a standard Kubernetes service definition yaml file. +This is a standard Kubernetes service definition YAML file. -NOTE: Make sure you change the image url to point to the image you built and pushed earlier! +NOTE: Make sure you change the image URL to point to the image you built and pushed earlier! -For our quickstart, one Kubernetes service will contain all functions. There's no reason you couldn't break up this +For our quickstart, one Kubernetes service will contain all functions. 
There's no reason you couldn't break up this quickstart into multiple different projects and deploy a service for each function. For simplicity, and to show that you don't have to have a deployment per function, the quickstart combines everything into one project, image, and service. -Deploy the service yaml. +Deploy the service: -[source, shell] +[source,bash] ---- kubectl apply -n knativetutorial -f src/main/k8s/funqy-service.yaml ---- -The next step is to deploy Knative Event triggers for each of the event types. As noted in the code section, each +The next step is to deploy Knative Event triggers for each of the event types. As noted in the code section, each Funqy function is mapped to a specific Cloud Event type. You must create Knative Event triggers that map a Cloud Event and route it to a specific Kubernetes service. We have 4 different triggers. @@ -281,6 +293,7 @@ kind: Trigger metadata: name: defaultchain spec: + broker: default filter: attributes: type: defaultChain @@ -294,13 +307,13 @@ spec: The `spec:filter:attributes:type` maps a Cloud Event type to the Kubernetes service defined in `spec:subscriber:ref`. When a Cloud Event is pushed to the Broker, it will trigger the spin up of the service mapped to that event. -There's a trigger yaml file for each of our 4 Funqy functions. Deploy them all: +There's a trigger YAML file for each of our 4 Funqy functions. Deploy them all: -[source, shell] +[source,bash] ---- kubectl apply -n knativetutorial -f src/main/k8s/defaultChain-trigger.yaml kubectl apply -n knativetutorial -f src/main/k8s/configChain-trigger.yaml -kubectl apply -n knativetutorial -f src/main/k8s/annoatedChain-trigger.yaml +kubectl apply -n knativetutorial -f src/main/k8s/annotatedChain-trigger.yaml kubectl apply -n knativetutorial -f src/main/k8s/lastChainLink-trigger.yaml ---- @@ -310,33 +323,41 @@ You'll need two different terminal windows. One to do a curl request to the Bro files so you can see the messages flowing through the Funqy function event chain. Make sure you have the `stern` tool installed. See the Knative Tutorial setup for information on that. Run stern -to look for logs outputed by our Funqy deployment +to look for logs outputted by our Funqy deployment -[source, shell] +[source,bash] ---- stern funq user-container ---- Open a separate terminal. You'll first need to learn the URL of the broker. Execute this command to find it. -[source, shell] +[source,bash] ---- kubectl get broker default -o jsonpath='{.status.address.url}' ---- -This will provide you a url like this (exactly like this if you followed the knative tutorial): `http://default-broker.knativetutorial.svc.cluster.local` -Remember this URL. +This will provide you a URL similar to e.g.: `http://broker-ingress.knative-eventing.svc.cluster.local/knativetutorial/default`. Remember this URL. Next thing we need to do is ssh into our Kubernetes cluster so that we can send a curl request to our broker. +Following command will create a simple OS pod so we can curl into our functions. + +[source,bash] +---- +kubectl -n knativetutorial apply -f src/main/k8s/curler.yaml +---- + +You might need to wait a couple of seconds until the curler pod comes up. Run the following to get bash access to the curler pod: -[source, shell] +[source,bash] ---- kubectl -n knativetutorial exec -it curler -- /bin/bash ---- -You will now be in a shell within the Kubernetes cluster. Within the shell, execute this curl command -[source, shell] +You will now be in a shell within the Kubernetes cluster. 
Within the shell, execute this curl command , the broker address is an example and might differ based on your project or broker name. + +[source,bash] ---- curl -v "http://default-broker.knativetutorial.svc.cluster.local" \ -X POST \ diff --git a/docs/src/main/asciidoc/funqy.adoc b/docs/src/main/asciidoc/funqy.adoc index 9fb28d0520a04..5afa10ccba5a3 100644 --- a/docs/src/main/asciidoc/funqy.adoc +++ b/docs/src/main/asciidoc/funqy.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Funqy diff --git a/docs/src/main/asciidoc/gcp-functions-http.adoc b/docs/src/main/asciidoc/gcp-functions-http.adoc index bfc037366226a..67bf8bce26327 100644 --- a/docs/src/main/asciidoc/gcp-functions-http.adoc +++ b/docs/src/main/asciidoc/gcp-functions-http.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Google Cloud Functions (Serverless) with RESTEasy, Undertow, or Vert.x Web :extension-status: experimental @@ -9,9 +9,9 @@ https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc include::./attributes.adoc[] The `quarkus-google-cloud-functions-http` extension allows you to write microservices with RESTEasy (JAX-RS), -Undertow (Servlet) or Vert.x Web, and make these microservices deployable to the Google Cloud Functions runtime. +Undertow (Servlet), Vert.x Web, or link:funqy-http[Funqy HTTP], and make these microservices deployable to the Google Cloud Functions runtime. -One Google Cloud Functions deployment can represent any number of JAX-RS, Servlet, or Vert.x Web endpoints. +One Google Cloud Functions deployment can represent any number of JAX-RS, Servlet, Vert.x Web, or link:funqy-http[Funqy HTTP] endpoints. As the Google Cloud Function Java engine is a new Beta feature of Google Cloud, this extension is flagged as experimental. @@ -30,7 +30,7 @@ To complete this guide, you need: == Solution This guide walks you through generating a sample project followed by creating three HTTP endpoints -written with JAX-RS APIs, Servlet APIs or Vert.x Web APIs. Once built, you will be able to deploy +written with JAX-RS APIs, Servlet APIs, Vert.x Web, or link:funqy-http[Funqy HTTP] APIs. Once built, you will be able to deploy the project to Google Cloud. If you don't want to follow all these steps, you can go right to the completed example. @@ -44,36 +44,36 @@ The solution is located in the `google-cloud-functions-http-quickstart` {quickst Create an application with the `quarkus-google-cloud-functions-http` extension. 
You can use the following Maven command to create it: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=google-cloud-functions-http \ -DclassName="org.acme.quickstart.GreetingResource" \ -Dpath="/hello" \ - -Dextensions="google-cloud-functions-http,resteasy-json,undertow,vertx-web" + -Dextensions="resteasy,google-cloud-functions-http,resteasy-json,undertow,vertx-web,funqy-http" ---- == Login to Google Cloud Login to Google Cloud is necessary for deploying the application and it can be done as follows: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- gcloud auth login ---- At the time of this writing, Cloud Functions are still in beta so make sure to install the `beta` command group. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- gcloud components install beta ---- == Creating the endpoints -For this example project, we will create three endpoints, one for RESTEasy (JAX-RS), one for Undertow (Servlet) -and one for Vert.x Web (reactive routes). +For this example project, we will create four endpoints, one for RESTEasy (JAX-RS), one for Undertow (Servlet), +one for Vert.x Web (reactive routes) and one for link:funqy-http[Funqy HTTP]. If you don't need endpoints of each type, you can remove the corresponding extensions from your `pom.xml`. @@ -146,6 +146,20 @@ public class GreetingRoutes { } ---- +=== The Funqy HTTP endpoint + +[source,java] +---- +import io.quarkus.funqy.Funq; + +public class GreetingFunqy { + @Funq + public String funqy() { + return "Make it funqy"; + } +} +---- + == Build and Deploy to Google Cloud NOTE: Quarkus forces a packaging of type `uber-jar` for your function as Google Cloud Function deployment requires a single JAR. @@ -155,7 +169,7 @@ The result of the previous command is a single JAR file inside the `target/deplo Then you will be able to use `gcloud` to deploy your function to Google Cloud. -[source] +[source,bash] ---- gcloud beta functions deploy quarkus-example-http \ --entry-point=io.quarkus.gcp.functions.http.QuarkusHttpFunction \ @@ -185,6 +199,7 @@ You can then call your endpoints via: - For JAX-RS: {httpsTrigger.url}/hello - For servlet: {httpsTrigger.url}/servlet/hello - For Vert.x Web: {httpsTrigger.url}/vertx/hello +- For Funqy: {httpsTrigger.url}/funqy == Testing locally @@ -192,22 +207,26 @@ The easiest way to locally test your function is using the Cloud Function invoke You can download it via Maven using the following command: -[source] +[source,bash] ---- mvn dependency:copy \ -Dartifact='com.google.cloud.functions.invoker:java-function-invoker:1.0.0-beta1' \ -DoutputDirectory=. ---- +Before using the invoker, you first need to build your function via `mvn package`. + Then you can use it to launch your function locally. -[source] +[source,bash] ---- java -jar java-function-invoker-1.0.0-beta1.jar \ - --classpath target/deployment/google-cloud-functions-http-1.0-SNAPSHOT-runner.jar \ + --classpath target/deployment/google-cloud-functions-http-1.0.0-SNAPSHOT-runner.jar \ --target io.quarkus.gcp.functions.http.QuarkusHttpFunction ---- +IMPORTANT: The `--classpath` parameter needs to be set to the previously packaged JAR that contains your function class and all Quarkus related classes. + Your endpoints will be available on http://localhost:8080. == What's next? 
diff --git a/docs/src/main/asciidoc/gcp-functions.adoc b/docs/src/main/asciidoc/gcp-functions.adoc old mode 100755 new mode 100644 index 2fe67612d58f0..5f756e8fbe01b --- a/docs/src/main/asciidoc/gcp-functions.adoc +++ b/docs/src/main/asciidoc/gcp-functions.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Google Cloud Functions (Serverless) :extension-status: experimental @@ -42,19 +42,19 @@ The solution is located in the `google-cloud-functions-quickstart` {quickstarts- Create an application with the `quarkus-google-cloud-functions` extension. You can use the following Maven command to create it: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=google-cloud-functions \ -DclassName="org.acme.quickstart.GreetingResource" \ -Dpath="/hello" \ - -Dextensions="google-cloud-functions" + -Dextensions="resteasy,google-cloud-functions" ---- Now, let's remove what's not needed inside the generated application: -- Remove the dependency `io.quarkus:quarkus-reasteasy` from your `pom.xml` file. +- Remove the dependency `io.quarkus:quarkus-resteasy` from your `pom.xml` file. - Remove the generated `org.acme.quickstart.GreetingResource` class. - Remove the `index.html` from `resources/META-INF/resources` or it will be picked up instead of your Function. - Remove the existing tests. @@ -63,25 +63,25 @@ Now, let's remove what's not needed inside the generated application: Login to Google Cloud is necessary for deploying the application and it can be done as follows: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- gcloud auth login ---- At the time of this writing, Cloud Functions are still in beta so make sure to install the `beta` command group. -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- gcloud components install beta ---- == Creating the functions -For this example project, we will create three functions, one `HttpFunction`, one `BackgroundFunction` (Storage event) and one `RawBakgroundFunction` (PubSub event). +For this example project, we will create three functions, one `HttpFunction`, one `BackgroundFunction` (Storage event) and one `RawBackgroundFunction` (PubSub event). == Choose Your Function -The `quarkus-google-cloud-functions` extension scans your project for a class that directly implements the Google Cloud `HttpFunction`, `BackgroundFunction` or `RawBakgroundFunction` interface. +The `quarkus-google-cloud-functions` extension scans your project for a class that directly implements the Google Cloud `HttpFunction`, `BackgroundFunction` or `RawBackgroundFunction` interface. It must find a class in your project that implements one of these interfaces or it will throw a build time failure. If it finds more than one function classes, a build time exception will also be thrown. @@ -91,7 +91,7 @@ project and use configuration or an environment variable to pick the function yo To configure the name of the function, you can use the following configuration property: -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- quarkus.google-cloud-functions.function=test ---- @@ -239,7 +239,7 @@ This means that Cloud Build is not activated yet. 
To overcome this error, open t This is an example command to deploy your `HttpFunction` to Google Cloud: -[source] +[source,bash] ---- gcloud beta functions deploy quarkus-example-http \ --entry-point=io.quarkus.gcp.functions.QuarkusHttpFunction \ @@ -257,7 +257,7 @@ This command will give you as output a `httpsTrigger.url` that points to your fu Before deploying your function, you need to create a bucket. -[source] +[source,bash] ---- gsutil mb gs://quarkus-hello ---- @@ -265,7 +265,7 @@ gsutil mb gs://quarkus-hello This is an example command to deploy your `BackgroundFunction` to Google Cloud, as the function is triggered by a Storage event, it needs to use `--trigger-event google.storage.object.finalize` and the `--trigger-resource` parameter with the name of a previously created bucket: -[source] +[source,bash] ---- gcloud beta functions deploy quarkus-example-storage \ --entry-point=io.quarkus.gcp.functions.QuarkusBackgroundFunction \ @@ -280,7 +280,7 @@ The entry point must always be set to `io.quarkus.gcp.functions.QuarkusBackgroun To trigger the event, you can send a file to the GCS `quarkus-hello` bucket or you can use gcloud to simulate one: -[source] +[source,bash] ---- gcloud functions call quarkus-example-storage --data '{"name":"test.txt"}' ---- @@ -292,7 +292,7 @@ NOTE: `--data` contains the GCS event, it is a JSON document with the name of th This is an example command to deploy your `RawBackgroundFunction` to Google Cloud, as the function is triggered by a PubSub event, it needs to use `--trigger-event google.pubsub.topic.publish` and the `--trigger-resource` parameter with the name of a previously created topic: -[source] +[source,bash] ---- gcloud beta functions deploy quarkus-example-pubsub \ --entry-point=io.quarkus.gcp.functions.QuarkusBackgroundFunction \ @@ -304,9 +304,9 @@ gcloud beta functions deploy quarkus-example-pubsub \ The entry point must always be set to `io.quarkus.gcp.functions.QuarkusBackgroundFunction` as this is the class that integrates Cloud Functions with Quarkus. ==== -To trigger the event, you can send a file to the `hello_topic` topic of you can use gcloud to simulate one: +To trigger the event, you can send a file to the `hello_topic` topic or you can use gcloud to simulate one: -[source] +[source,bash] ---- gcloud functions call quarkus-example-pubsub --data '{"data":{"greeting":"world"}}' ---- @@ -317,40 +317,46 @@ The easiest way to locally test your function is using the Cloud Function invoke You can download it via Maven using the following command: -[source] +[source,bash] ---- mvn dependency:copy \ -Dartifact='com.google.cloud.functions.invoker:java-function-invoker:1.0.0-beta1' \ -DoutputDirectory=. ---- +Before using the invoker, you first need to build your function via `mvn package`. + === The HttpFunction To test an `HttpFunction`, you can use this command to launch your function locally. -[source] +[source,bash] ---- java -jar java-function-invoker-1.0.0-beta1.jar \ - --classpath target/google-cloud-functions-1.0-SNAPSHOT-runner.jar \ + --classpath target/google-cloud-functions-1.0.0-SNAPSHOT-runner.jar \ --target io.quarkus.gcp.functions.QuarkusHttpFunction ---- +IMPORTANT: The `--classpath` parameter needs to be set to the previously packaged JAR that contains your function class and all Quarkus related classes. + Your endpoints will be available on http://localhost:8080. === The BackgroundFunction For background functions, you launch the invoker with a target class of `io.quarkus.gcp.functions.BackgroundFunction`. 
-[source] +[source,bash] ---- java -jar java-function-invoker-1.0.0-beta1.jar \ - --classpath target/google-cloud-functions-1.0-SNAPSHOT-runner.jar \ + --classpath target/google-cloud-functions-1.0.0-SNAPSHOT-runner.jar \ --target io.quarkus.gcp.functions.QuarkusBackgroundFunction ---- +IMPORTANT: The `--classpath` parameter needs to be set to the previously packaged JAR that contains your function class and all Quarkus related classes. + Then you can call your background function via an HTTP call with a payload containing the event: -[source] +[source,bash] ---- curl localhost:8080 -d '{"data":{"name":"hello.txt"}}' ---- @@ -361,16 +367,18 @@ This will call your Storage background function with an event `{"name":"hello.tx For background functions, you launch the invoker with a target class of `io.quarkus.gcp.functions.BackgroundFunction`. -[source] +[source,bash] ---- java -jar java-function-invoker-1.0.0-beta1.jar \ - --classpath target/google-cloud-functions-1.0-SNAPSHOT-runner.jar \ + --classpath target/google-cloud-functions-1.0.0-SNAPSHOT-runner.jar \ --target io.quarkus.gcp.functions.QuarkusBackgroundFunction ---- +IMPORTANT: The `--classpath` parameter needs to be set to the previously packaged JAR that contains your function class and all Quarkus related classes. + Then you can call your background function via an HTTP call with a payload containing the event: -[source] +[source,bash] ---- curl localhost:8080 -d '{"data":{"greeting":"world"}}' ---- diff --git a/docs/src/main/asciidoc/getting-started-knative.adoc b/docs/src/main/asciidoc/getting-started-knative.adoc deleted file mode 100644 index f0a6f0ea3e5ea..0000000000000 --- a/docs/src/main/asciidoc/getting-started-knative.adoc +++ /dev/null @@ -1,133 +0,0 @@ -//// -This guide is maintained in the main Quarkus repository -and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc -//// -= Quarkus - Deploying Knative Application to Kubernetes or OpenShift - -include::./attributes.adoc[] -:experimental: - -This guide covers: - -The deployment of a serverless application to Kubernetes using https://knative.dev[Knative]. - -This guide takes as input the application developed in the link:building-native-image[native application guide]. -So, you should have been able to package your application as a binary executable, copied it in a Docker image and run this image. - -Depending on whether you are a _bare_ Kubernetes user or an OpenShift user, pick the section you need. -The OpenShift section leverages OpenShift build and route features which are not available in _bare_ Kubernetes. - -== Prerequisites - -For this guide you need: - -* roughly 20 minutes -* having access to a Kubernetes cluster. Minikube is a valid option. -* having deployed Knative components on https://knative.dev/docs/install/knative-with-minikube/[minikube] -* having https://skaffold.dev/[Skaffold] CLI on your PATH - -== Solution - -We recommend to follow the instructions in the next sections and build the application step by step. However, you can go right to the completed example. - -Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. - -The solution is located in the `getting-started-knative` directory. - -== Set up Kubernetes Cluster - -The following script will start minikube and deploy Knative. 
- -[source,bash] ----- -./bin/start-minikube.sh -eval $(minikube docker-env) #<1> ----- - -<1> Make the current Docker context to be that of minikube - -== Set up Nexus(Optional) - -Nexus is used for caching maven artifacts so that Apache Maven builds are faster. - -[source,bash] ----- -kubectl apply -f k8s/nexus.yaml ----- - -Wait for some time to have Nexus initialize and run. You can watch the status via `kubectl get pods -w`, use kbd:[Ctrl+c] to terminate the watch. - -== Why Skaffold ? - -As vanilla Kubernetes does not have an easy and developer friendly way to build and deploy application to a local cluster like minikube, without the need to push the image to external container registry. We will be using Skaffold to help us build and deploy the Quarkus application onto Kubernetes. - -== Build and deploy application - -[IMPORTANT] -==== -The container image will not be pushed to a remote container registry, and hence the container image url has to be `dev.local`, to make Knative deploy it without trying to pull it from external container registry. -==== - -To run Knative Quarkus applications, we need to use the multi stage docker build; to build the Quarkus application container image and use it in Kubernetes application deployment. - -The following command start a one time deployment of Quarkus application and runs starts the Knative service after successful container image build. - -[NOTE] -==== -If you want to deploy a Quarkus JVM image (using HotSpot), then run the following command before running Skaffold: - -[source,bash] ----- -cp src/main/docker/Dockerfile.jvm Dockerfile ----- - -If you want to deploy a Quarkus Native executable image (using GraalVM), then run the following command before running Skaffold: - -[source,bash] ----- -cp src/main/docker/Dockerfile.native Dockerfile ----- - -It is very important to note that the Dockerfile in `src/main/docker` folder has been modified Dockerfiles to support multi stage Docker build.The multi stage Docker build helps in building and containerization of application with single Dockerfile. -==== - -[source,bash] ----- -skaffold run ----- - -As it will take a few minutes for the build and deployment to be completed you can watch the status using: - -[source,bash] ----- -watch kubectl get pods ----- - -A successful Knative service deployment will show the following pods in the current namespace: - -[source,bash] ----- -NAME READY STATUS AGE -greeter-deployment-5749cc98fc-gs6zr 2/2 Running 10s ----- - -NOTE: The deployment name could differ in your environment. - -Once the Knative service is successfully running, you can call it using the script `./bin/call.sh`, which should return a response like `hello`. Allowing it to idle for approximately 60-65 seconds - that is without any further requests -, you will see it automatically scales down to zero pods. - -=== Cleanup - -To delete the application: - -[source,bash] ----- -skaffold delete ----- - -== Going further - -This guide covered the deployment of a Quarkus application as Knative application on Kubernetes -However, there is much more, and the integration with these environments has been tailored to make Quarkus applications execution very smooth. -For instance, the health extension can be used for health check; the configuration support allows mounting the application configuration using config map, the metric extension produces data _scrapable_ by Prometheus and so on. 
- diff --git a/docs/src/main/asciidoc/getting-started-reactive.adoc b/docs/src/main/asciidoc/getting-started-reactive.adoc index bfcaac781c09c..0976da65afef5 100644 --- a/docs/src/main/asciidoc/getting-started-reactive.adoc +++ b/docs/src/main/asciidoc/getting-started-reactive.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Getting started with Reactive @@ -12,7 +12,8 @@ This guide covers: * A quick glance at the Quarkus engine and how it enables reactive * A brief introduction to Mutiny - the reactive programming library used by Quarkus -* Bootstrapping a reactive application +* The difference between RESTEasy, RESTEasy Reactive and Reactive Routes +* The bootstrap of a reactive application using RESTEasy Reactive * Creating a reactive JAX-RS endpoint (asynchronous, streams...) * Using reactive database access * Interacting with other reactive APIs @@ -28,13 +29,13 @@ To complete this guide, you need: == Solutions -We recommend that you follow the instructions from <> and onwards to create the application step by step. +We recommend that you follow the instructions from <> and onwards to create the application step by step. However, you can go right to the completed example. Download an {quickstarts-archive-url}[archive] or clone the git repository: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- git clone {quickstarts-clone-url} ---- @@ -46,22 +47,30 @@ The solutions are located in the `getting-started-reactive` and `getting-started Quarkus is reactive. If you look under the hood, you will find a reactive engine powering your Quarkus application. This engine is Eclipse Vert.x (https://vertx.io). -Every IO interaction passes through the non-blocking and reactive Vert.x engine. +All network I/O passes through the non-blocking and reactive Vert.x engine. -image:quarkus-reactive-stack.png[alt=Quarkus is based on a reactive engine, 50%] +image:quarkus-reactive-stack.png[alt=Quarkus is based on a reactive engine, 70%] Let's take 2 examples to explain how it works. Imagine an incoming HTTP request. -The (Vert.x) HTTP server receives the request and then routes it to the application. -If the request targets a JAX-RS resource, the routing layer invokes the resource method in a worker thread and writes the response when the data is available. +The (Vert.x) HTTP server, embedded in Quarkus, receives the request and then routes it to the application. +If the request targets an _imperative_ method (traditional JAX-RS, code annotated with `@Blocking`...), the routing layer invokes the resource method in a _worker_ thread and writes the response when the data is available. So far, nothing new or outstanding. The following picture depicts this behavior. +In this case, the application code is invoked on a worker thread, and the business logic can block that thread. 
-image:http-blocking-sequence.png[alt=Behavior when using the imperative routes, 50%] +image:http-blocking-sequence.png[alt=Behavior when using the imperative routes, 70%] -But if the HTTP request targets a reactive (non-blocking) route, the routing layer invokes the route on the IO thread giving lots of benefits such as higher concurrency and performance: +But, if the HTTP request targets a reactive method (JAX-RS using RESTEasy Reactive, reactive routes, `@Incoming` method not annotated with `@Blocking`...), the routing layer invokes the route on the I/O thread giving lots of benefits such as higher concurrency and performance: -image:http-reactive-sequence.png[alt=Behavior when using the reactive routes, 50%] +image:http-reactive-sequence.png[alt=Behavior when using the reactive routes, 70%] + +Because Quarkus uses the I/O thread to invoke your code, we save context-switches, avoid large thread pool management, and so improve the resource utilization. +However, the code must **NOT** block that thread. +Why? Because, I/O threads are used to handle multiple concurrent requests. +As soon as the handling of a request cannot make progress because it needs to execute some I/O, it schedules these I/O and passes a continuation. +It releases the thread which can handle another request. +When the scheduled I/O complete, the continuation is executed, back on the I/O thread. As a consequence, many Quarkus components are designed with reactive in mind, such as database access (PostgreSQL, MySQL, Mongo, etc.), application services (mail, template engine, etc.), messaging (Kafka, AMQP, etc.) and so on. But, to fully benefit from this model, the application code should be written in a non-blocking manner. @@ -71,7 +80,7 @@ That’s where having a reactive API is an ultimate weapon. == Mutiny - A reactive programming library https://github.com/smallrye/smallrye-mutiny[Mutiny] is a reactive programming library allowing to express and compose asynchronous actions. -It offers 2 types: +It offers two types: * `io.smallrye.mutiny.Uni` - for asynchronous action providing 0 or 1 result * `io.smallrye.mutiny.Multi` - for multi-item (with back-pressure) streams @@ -160,18 +169,25 @@ But enough talking, let's get our hands dirty! == Bootstrapping the project +There are several ways to implement reactive application with Quarkus. +In this guide we are going to use RESTEasy Reactive, an implementation of RESTEasy benefiting from the Quarkus reactive engine. +By default, it invokes the HTTP endpoint on the I/O thread. + +IMPORTANT: While it's possible to use _traditional_ RESTEasy, you would need to add the `quarkus-resteasy-mutiny` extension, and the method will still be invoked on a _worker_ thread. +So, while it would use reactive programming, it would still require worker threads, which defeats the purpose. 
+ The easiest way to create a new Quarkus project is to open a terminal and run the following command: For Linux and macOS users -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=getting-started-reactive \ - -DclassName="org.acme.quickstart.ReactiveGreetingResource" \ + -DclassName="org.acme.getting.started.ReactiveGreetingResource" \ -Dpath="/hello" \ - -Dextensions="resteasy-mutiny, resteasy-jsonb" + -Dextensions="resteasy-reactive" cd getting-started-reactive ---- @@ -179,16 +195,16 @@ For Windows users - If using cmd, (don't use forward slash `\`) -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- -mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create -DprojectGroupId=org.acme -DprojectArtifactId=getting-started-reactive -DclassName="org.acme.quickstart.ReactiveGreetingResource" -Dpath="/hello" -Dextensions="resteasy-mutiny,resteasy-jsonb" +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create -DprojectGroupId=org.acme -DprojectArtifactId=getting-started-reactive -DclassName="org.acme.getting.started.ReactiveGreetingResource" -Dpath="/hello" -Dextensions="resteasy-reactive" ---- - If using Powershell, wrap `-D` parameters in double quotes -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- -mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create "-DprojectGroupId=org.acme" "-DprojectArtifactId=getting-started-reactive" "-DclassName=org.acme.quickstart.ReactiveGreetingResource" "-Dpath=/hello" "-Dextensions=resteasy-mutiny,resteasy-jsonb" +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create "-DprojectGroupId=org.acme" "-DprojectArtifactId=getting-started-reactive" "-DclassName=org.acme.getting.started.ReactiveGreetingResource" "-Dpath=/hello" "-Dextensions=resteasy-reactive" ---- It generates the following in `./getting-started-reactive`: @@ -200,15 +216,13 @@ It generates the following in `./getting-started-reactive`: * example `Dockerfile` files for both `native` and `jvm` modes in `src/main/docker` * the application configuration file -The generated `pom.xml` also declares the RESTEasy Mutiny support and RESTEasy JSON-B to serialize payloads. - === Reactive JAX-RS resources -During the project creation, the `src/main/java/org/acme/quickstart/ReactiveGreetingResource.java` file has been created with the following content: +During the project creation, the `src/main/java/org/acme/getting/started/ReactiveGreetingResource.java` file has been created with the following content: [source,java] ---- -package org.acme.quickstart; +package org.acme.getting.started; import javax.ws.rs.GET; import javax.ws.rs.Path; @@ -221,12 +235,31 @@ public class ReactiveGreetingResource { @GET @Produces(MediaType.TEXT_PLAIN) public String hello() { - return "hello"; + return "Hello RESTEasy Reactive"; } } ---- -It's a very simple REST endpoint, returning "hello" to requests on "/hello". +It's a very simple REST endpoint, returning "Hello RESTEasy Reactive" to requests on "/hello". +As it uses RESTEAsy Reactive, this method is called on the I/O thread. + +[NOTE] +==== +To instruct Quarkus to invoke this method on a _worker_ thread, annotate it with the `io.smallrye.common.annotation.Blocking` annotation. 
+You can use `@Blocking` on a method, class or enable it for the whole application by annotated an `Application` class: + +[source, java] +---- +import javax.ws.rs.ApplicationPath; +import javax.ws.rs.core.Application; +import io.smallrye.common.annotation.Blocking; + +@ApplicationPath("/") +@Blocking +public class RestBlockingApplication extends Application { +} +---- +==== Let's now create a `ReactiveGreetingService` class with the following content: @@ -245,7 +278,7 @@ public class ReactiveGreetingService { public Uni greeting(String name) { return Uni.createFrom().item(name) - .onItem().transform(n -> String.format("hello %s", name)); + .onItem().transform(n -> String.format("hello %s", n)); } } ---- @@ -264,8 +297,6 @@ import javax.ws.rs.core.MediaType; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.Uni; -import org.jboss.resteasy.annotations.SseElementType; -import org.jboss.resteasy.annotations.jaxrs.PathParam; import org.reactivestreams.Publisher; @Path("/hello") @@ -277,7 +308,7 @@ public class ReactiveGreetingResource { @GET @Produces(MediaType.TEXT_PLAIN) @Path("/greeting/{name}") - public Uni greeting(@PathParam String name) { + public Uni greeting(String name) { return service.greeting(name); } @@ -290,16 +321,17 @@ public class ReactiveGreetingResource { ---- The `ReactiveGreetingService` class contains a straightforward method producing a `Uni`. -While, in this example, the resulting item is emitted immediately, you can imagine any async API producing a `Uni`. We cover this later in this guide. +While, in this example, the resulting item is emitted immediately, you can imagine any async API producing a `Uni`. +We cover this later in this guide. Now, start the application using: -[source, shell] +[source,bash] ---- ./mvnw quarkus:dev ---- -Once running, check that you get the expected greeting message by opening http://localhost:8080/hello/greeting/neo. +Once running, check you get the expected greeting message by opening http://localhost:8080/hello/greeting/neo. == Handling streams So far, we only return an asynchronous result. @@ -317,6 +349,8 @@ public Multi greetings(int count, String name) { } ---- +TIP: you may need to add the `import io.smallrye.mutiny.Multi;` and `import java.time.Duration;` statements. + It generates a greeting message every second and stops after `count` messages. In the `ReactiveGreetingResource` add the following method: @@ -326,7 +360,7 @@ In the `ReactiveGreetingResource` add the following method: @GET @Produces(MediaType.APPLICATION_JSON) @Path("/greeting/{count}/{name}") -public Multi greetings(@PathParam int count, @PathParam String name) { +public Multi greetings(int count, String name) { return service.greetings(count, name); } ---- @@ -335,10 +369,11 @@ This endpoint streams the items to the client as a JSON Array. The name and number of messages are parameterized using path parameters. 
So calling the endpoint produces something like: -[source, shell] + +[source,shell] ---- $ curl http://localhost:8080/hello/greeting/3/neo -["hello neo - 0","hello neo - 1","hello neo - 2"] +["hello neo - 0", "hello neo - 1", "hello neo - 2"] ---- We can also generate Server-Sent Event responses by returning a `Multi`: @@ -347,19 +382,21 @@ We can also generate Server-Sent Event responses by returning a `Multi`: ---- @GET @Produces(MediaType.SERVER_SENT_EVENTS) -@SseElementType(MediaType.TEXT_PLAIN) +@RestSseElementType(MediaType.TEXT_PLAIN) @Path("/stream/{count}/{name}") -public Multi greetingsAsStream(@PathParam int count, @PathParam String name) { +public Multi greetingsAsStream(int count, String name) { return service.greetings(count, name); } ---- -The only difference with the previous snippet is the produced type and the `@SseElementType` annotation indicating the type of each event. +The only difference with the previous snippet is the produced type and the `@RestSseElementType` annotation indicating the type of each event. As the `@Produces` annotation defines `SERVER_SENT_EVENTS`, JAX-RS needs it to knows the content type of each (nested) event. +TIP: You may need to add the `import org.jboss.resteasy.reactive.RestSseElementType;` statement. + You can see the result using: -[source, shell] +[source,shell] ---- $ curl -N http://localhost:8080/hello/stream/5/neo data: hello neo - 0 @@ -382,20 +419,20 @@ In this section, we are going to see how you can use the Reactive PostgreSQL dri Create a new project using: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=getting-started-reactive-crud \ -DclassName="org.acme.reactive.crud.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-mutiny, resteasy-jsonb, reactive-pg-client" + -Dextensions="resteasy-reactive,resteasy-reactive-jackson,reactive-pg-client" cd getting-started-reactive-crud ---- This application is interacting with a PostgreSQL database, so you need one: -[source, shell] +[source,bash] ---- docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 \ --name postgres-quarkus-reactive -e POSTGRES_USER=quarkus_test \ @@ -493,21 +530,57 @@ These methods return either `Unis` or `Multis` as the produced items are emitted Notice that the reactive PostgreSQL client already provides `Uni` and `Multi` instances. So you only transform the results from the database into _business-friendly_ objects. -Then, let's use this `Fruit` class in the `FruitResource`. 
-Edit the `FruitResource` class to match the following content: +For the purposes of initializing the database when the application starts, we will create a class named `DBInit` with the following content: [source, java] ---- package org.acme.reactive.crud; -import io.smallrye.mutiny.Multi; -import io.smallrye.mutiny.Uni; +import io.quarkus.runtime.StartupEvent; import io.vertx.mutiny.pgclient.PgPool; import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.resteasy.annotations.jaxrs.PathParam; -import javax.annotation.PostConstruct; -import javax.inject.Inject; +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.event.Observes; + +@ApplicationScoped +public class DBInit { + + private final PgPool client; + private final boolean schemaCreate; + + public DBInit(PgPool client, @ConfigProperty(name = "myapp.schema.create", defaultValue = "true") boolean schemaCreate) { + this.client = client; + this.schemaCreate = schemaCreate; + } + + void onStart(@Observes StartupEvent ev) { + if (schemaCreate) { + initdb(); + } + } + + private void initdb() { + client.query("DROP TABLE IF EXISTS fruits").execute() + .flatMap(r -> client.query("CREATE TABLE fruits (id SERIAL PRIMARY KEY, name TEXT NOT NULL)").execute()) + .flatMap(r -> client.query("INSERT INTO fruits (name) VALUES ('Kiwi')").execute()) + .flatMap(r -> client.query("INSERT INTO fruits (name) VALUES ('Durian')").execute()) + .flatMap(r -> client.query("INSERT INTO fruits (name) VALUES ('Pomelo')").execute()) + .flatMap(r -> client.query("INSERT INTO fruits (name) VALUES ('Lychee')").execute()) + .await().indefinitely(); + } +} +---- + +Then, let's use this `Fruit` class in the `FruitResource`. +Edit the `FruitResource` class to match the following content: + +[source, java] +---- +package org.acme.reactive.crud; + +import java.net.URI; + import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; @@ -519,25 +592,20 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.Response.Status; -import java.net.URI; + +import io.smallrye.mutiny.Multi; +import io.smallrye.mutiny.Uni; +import io.vertx.mutiny.pgclient.PgPool; @Path("fruits") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public class FruitResource { - @Inject - @ConfigProperty(name = "myapp.schema.create", defaultValue = "true") - boolean schemaCreate; - - @Inject - PgPool client; + private final PgPool client; - @PostConstruct - void config() { - if (schemaCreate) { - initdb(); - } + public FruitResource(PgPool client) { + this.client = client; } private void initdb() { @@ -557,7 +625,7 @@ public class FruitResource { @GET @Path("{id}") - public Uni getSingle(@PathParam Long id) { + public Uni getSingle(Long id) { return Fruit.findById(client, id) .onItem().transform(fruit -> fruit != null ? Response.ok(fruit) : Response.status(Status.NOT_FOUND)) .onItem().transform(ResponseBuilder::build); @@ -572,7 +640,7 @@ public class FruitResource { @PUT @Path("{id}") - public Uni update(@PathParam Long id, Fruit fruit) { + public Uni update(Long id, Fruit fruit) { return fruit.update(client) .onItem().transform(updated -> updated ? 
Status.OK : Status.NOT_FOUND) .onItem().transform(status -> Response.status(status).build()); @@ -580,7 +648,7 @@ public class FruitResource { @DELETE @Path("{id}") - public Uni delete(@PathParam Long id) { + public Uni delete(Long id) { return Fruit.delete(client, id) .onItem().transform(deleted -> deleted ? Status.NO_CONTENT : Status.NOT_FOUND) .onItem().transform(status -> Response.status(status).build()); @@ -597,7 +665,7 @@ Also, you can use Vert.x clients directly. First of all, make sure the `quarkus-vertx` extension is present. If not, activate the extension by executing the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:add-extensions \ -Dextensions=vertx diff --git a/docs/src/main/asciidoc/getting-started-testing.adoc b/docs/src/main/asciidoc/getting-started-testing.adoc index c24a5115e240a..bc51fdbda8a41 100644 --- a/docs/src/main/asciidoc/getting-started-testing.adoc +++ b/docs/src/main/asciidoc/getting-started-testing.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Testing Your Application @@ -47,7 +47,7 @@ However, you can go right to the completed example. Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. -The solution is located in the `getting-started-testing` directory. +The solution is located in the `getting-started-testing` {quickstarts-tree-url}/getting-started-testing[directory]. This guide assumes you already have the completed application from the `getting-started` directory. @@ -159,6 +159,16 @@ quarkus.http.test-ssl-port=8446 Quarkus also provides RestAssured integration that updates the default port used by RestAssured before the tests are run, so no additional configuration should be required. +=== Controlling HTTP interaction timeout + +When using REST Assured in your test, the connection and response timeouts are set to 30 seconds. +You can override this setting with the `quarkus.http.test-timeout` property: + +[source] +---- +quarkus.http.test-timeout=10s +---- + === Injecting a URI It is also possible to directly inject the URL into the test which can make is easy to use a different client. This is @@ -228,6 +238,117 @@ public class StaticContentTest { For now `@TestHTTPResource` allows you to inject `URI`, `URL` and `String` representations of the URL. +== Testing a specific endpoint + +Both RESTassured and `@TestHTTPResource` allow you to specify the endpoint class you are testing rather than hard coding +a path. This currently supports both JAX-RS endpoints, Servlets and Reactive Routes. This makes it a lot easier to see exactly which endpoints +a given test is testing. + +For the purposes of these examples I am going to assume we have an endpoint that looks like the following: + +[source,java] +---- +@Path("/hello") +public class GreetingResource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "hello"; + } +} +---- + +NOTE: This currently does not support the `@ApplicationPath()` annotation to set the JAX-RS context path. Use the +`quarkus.resteasy.path` config value instead if you want a custom context path. 
+ +=== TestHTTPResource + +You can the use the `io.quarkus.test.common.http.TestHTTPEndpoint` annotation to specify the endpoint path, and the path +will be extracted from the provided endpoint. If you also specify a value for the `TestHTTPResource` endpoint it will +be appended to the end of the endpoint path. + +[source,java] +---- +package org.acme.getting.started.testing; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.charset.StandardCharsets; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.common.http.TestHTTPEndpoint; +import io.quarkus.test.common.http.TestHTTPResource; +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +public class StaticContentTest { + + @TestHTTPEndpoint(GreetingResource.class) // <1> + @TestHTTPResource + URL url; + + @Test + public void testIndexHtml() throws Exception { + try (InputStream in = url.openStream()) { + String contents = readStream(in); + Assertions.assertTrue(contents.equals("hello")); + } + } + + private static String readStream(InputStream in) throws IOException { + byte[] data = new byte[1024]; + int r; + ByteArrayOutputStream out = new ByteArrayOutputStream(); + while ((r = in.read(data)) > 0) { + out.write(data, 0, r); + } + return new String(out.toByteArray(), StandardCharsets.UTF_8); + } +} +---- +<1> Because `GreetingResource` is annotated with `@Path("/hello")` the injected URL +will end with `/hello`. + +=== RESTassured + +To control the RESTassured base path (i.e. the default path that serves as the root for every +request) you can use the `io.quarkus.test.common.http.TestHTTPEndpoint` annotation. This can +be applied at the class or method level. To test out greeting resource we would do: + +[source,java] +---- +package org.acme.getting.started.testing; + +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.common.http.TestHTTPEndpoint; +import org.junit.jupiter.api.Test; + +import java.util.UUID; + +import static io.restassured.RestAssured.when; +import static org.hamcrest.CoreMatchers.is; + +@QuarkusTest +@TestHTTPEndpoint(GreetingResource.class) //<1> +public class GreetingResourceTest { + + @Test + public void testHelloEndpoint() { + when().get() //<2> + .then() + .statusCode(200) + .body(is("hello")); + } +} +---- +<1> This tells RESTAssured to prefix all requests with `/hello`. +<2> Note we don't need to specify a path here, as `/hello` is the default for this test + == Injection into tests So far we have only covered integration style tests that test the app via HTTP endpoints, but what if we want to do unit @@ -303,14 +424,24 @@ public class TestStereotypeTestCase { } ---- +== Tests and Transactions + +You can use the standard Quarkus `@Transactional` annotation on tests, but this means that the changes your +test makes to the database will be persistent. If you want any changes made to be rolled back at the end of +the test you can use the `io.quarkus.test.TestTransaction` annotation. This will run the test method in a +transaction, but roll it back once the test method is complete to revert any database changes. 
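For illustration, here is a minimal sketch of such a test; the injected `EntityManager` and the `known_fruits` table are assumptions made for this example and are not part of the guide's application:

[source,java]
----
package org.acme.getting.started.testing;

import javax.inject.Inject;
import javax.persistence.EntityManager;

import org.junit.jupiter.api.Test;

import io.quarkus.test.TestTransaction;
import io.quarkus.test.junit.QuarkusTest;

@QuarkusTest
public class RollbackExampleTest {

    @Inject
    EntityManager entityManager;

    @Test
    @TestTransaction // the transaction wrapping this test method is rolled back when it completes
    public void testInsertIsRolledBack() {
        // Hypothetical table used for the example: any data written here
        // disappears again once the test method finishes.
        entityManager.createNativeQuery("INSERT INTO known_fruits(id, name) VALUES (99, 'Dragonfruit')")
                .executeUpdate();
    }
}
----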
+ == Enrichment via QuarkusTest*Callback Alternatively or additionally to an interceptor, you can enrich *all* your `@QuarkusTest` classes by implementing the following callback interfaces: -* `io.quarkus.test.junit.callback.QuarkusTestBeforeAllCallback` +* `io.quarkus.test.junit.callback.QuarkusTestBeforeClassCallback` +* `io.quarkus.test.junit.callback.QuarkusTestAfterConstructCallback` * `io.quarkus.test.junit.callback.QuarkusTestBeforeEachCallback` * `io.quarkus.test.junit.callback.QuarkusTestAfterEachCallback` +WARNING: `io.quarkus.test.junit.callback.QuarkusTestBeforeAllCallback` has been deprecated in favor of `io.quarkus.test.junit.callback.QuarkusTestAfterConstructCallback` and will be removed in future releases of Quarkus + Such a callback implementation has to be registered as a "service provider" as defined by `java.util.ServiceLoader`. E.g. the following sample callback: @@ -363,10 +494,12 @@ To implement a test profile we need to implement `io.quarkus.test.junit.QuarkusT package org.acme.getting.started.testing; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Set; import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.QuarkusTestProfile.TestResourceEntry; public class MockGreetingProfile implements QuarkusTestProfile { @@ -385,17 +518,122 @@ public class MockGreetingProfile implements QuarkusTestProfile { public String getConfigProfile() { <3> return "test"; } + + @Override + public List testResources() { <4> + return Collections.singletonList(new TestResourceEntry(CustomWireMockServerManager.class)); + } } ---- <1> This method allows us to override configuration properties. Here we are changing the JAX-RS root path. <2> This method allows us to enable CDI `@Alternative` beans. This makes it easy to mock out certain beans functionality. <3> This can be used to change the config profile. As this default is `test` this does nothing, but is included for completeness. +<4> This method allows us to apply **additional** `QuarkusTestResourceLifecycleManager` classes, specific for this profile only. If this +method is not overridden, then only the `QuarkusTestResourceLifecycleManager` classes enabled via the `@QuarkusTestResource` class +annotation will be used for the tests using this profile (which is the same behavior as tests that don't use a profile at all). Now we have defined our profile we need to include it on our test class. We do this with `@TestProfile(MockGreetingProfile.class)`. All the test profile config is stored in a single class, which makes it easy to tell if the previous test ran with the same configuration. +=== Running specific tests + +Quarkus provides the ability to limit test execution to tests with specific `@TestProfile` annotations. +This works by leveraging the `tags` method of `QuarkusTestProfile` in conjunction with the `quarkus.test.profile.tags` system property. + +Essentially, any `QuarkusTestProfile` with at least one matching tag matching the value of `quarkus.test.profile.tags` will be considered active +and all the tests annotated with `@TestProfile` of active profiles, will be run while the rest will be skipped. +This is best shown in the following example. 
+ +First let's define a few `QuarkusTestProfile` implementations like so: +[source,java] +---- +public class Profiles { + + public static class NoTags implements QuarkusTestProfile { + + } + + public static class SingleTag implements QuarkusTestProfile { + @Override + public Set tags() { + return Collections.singleton("test1"); + } + } + + public static class MultipleTags implements QuarkusTestProfile { + @Override + public Set tags() { + return new HashSet<>(Arrays.asList("test1", "test2")); + } + } +} +---- + +Now let's assume that we have the following tests: + +[source,java] +---- +@QuarkusTest +public class NoQuarkusProfileTest { + + @Test + public void test() { + // test something + } +} +---- + +[source,java] +---- +@QuarkusTest +@TestProfile(Profiles.NoTags.class) +public class NoTagsTest { + + @Test + public void test() { + // test something + } +} +---- + +[source,java] +---- +@QuarkusTest +@TestProfile(Profiles.SingleTag.class) +public class SingleTagTest { + + @Test + public void test() { + // test something + } +} +---- + +[source,java] +---- +@QuarkusTest +@TestProfile(Profiles.MultipleTags.class) +public class MultipleTagsTest { + + @Test + public void test() { + // test something + } +} +---- + +Let's consider the following scenarios: + +* `quarkus.test.profile.tags` is not set: All tests will be executed. +* `quarkus.test.profile.tags=foo`: In this case none of tests will be executed because none of the tags defined on the `QuarkusTestProfile` implementations match the value of `quarkus.test.profile.tags`. +Note that `NoQuarkusProfileTest` is not executed either because it is not annotated with `@TestProfile`. +* `quarkus.test.profile.tags=test1`: In this case `SingleTagTest` and `MultipleTagsTest` will be run because the tags on their respective `QuarkusTestProfile` implementations +match the value of `quarkus.test.profile.tags`. +* `quarkus.test.profile.tags=test1,test3`: This case results in the same tests being executed as the previous case. +* `quarkus.test.profile.tags=test2,test3`: In this case only `MultipleTagsTest` will be run because `MultipleTagsTest` is the only `QuarkusTestProfile` implementation whose `tags` method +matches the value of `quarkus.test.profile.tags`. == Mock Support @@ -443,6 +681,7 @@ it will take effect all the time, not just when testing. Note that at present this approach does not work with native image testing, as this would required the test alternatives to be baked into the native image. 
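As a reminder of what the CDI alternative approach looks like, here is a minimal sketch of a mock placed under `src/test/java`; the `GreetingService` bean it replaces and the returned message are assumptions made for this example:

[source,java]
----
package org.acme.getting.started.testing;

import javax.annotation.Priority;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Alternative;

// Lives under src/test/java, so it only replaces the real bean while testing.
@Alternative
@Priority(1)
@ApplicationScoped
public class MockGreetingService extends GreetingService {

    @Override
    public String greeting(String name) {
        return "A mock greeting for " + name;
    }
}
----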
+[[quarkus_mock]] === Mocking using QuarkusMock The `io.quarkus.test.junit.QuarkusMock` class can be used to temporarily mock out any normal scoped @@ -480,7 +719,7 @@ public class MockTestCase { @Test public void testPerTestMock() { - QuarkusMock.installMockForInstance(new BonourGreeter(), mockableBean2); // <2> + QuarkusMock.installMockForInstance(new BonjourGreeter(), mockableBean2); // <2> Assertions.assertEquals("A mock for Stuart", mockableBean1.greet("Stuart")); Assertions.assertEquals("Bonjour Stuart", mockableBean2.greet("Stuart")); } @@ -501,7 +740,7 @@ public class MockTestCase { } } - public static class BonourGreeter extends MockableBean2 { + public static class BonjourGreeter extends MockableBean2 { @Override public String greet(String name) { return "Bonjour " + name; @@ -737,7 +976,7 @@ If you are using the `quarkus-hibernate-orm-panache` or `quarkus-mongodb-panache == Testing Security -If you are using Quarkus Security, check out the link:security.adoc#testing-security[Testing Security] section for information on how to easily test security features of the application. +If you are using Quarkus Security, check out the link:security-testing[Testing Security] section for information on how to easily test security features of the application. [#quarkus-test-resource] == Starting services before the Quarkus application starts @@ -745,12 +984,80 @@ If you are using Quarkus Security, check out the link:security.adoc#testing-secu A very common need is to start some services on which your Quarkus application depends, before the Quarkus application starts for testing. To address this need, Quarkus provides `@io.quarkus.test.common.QuarkusTestResource` and `io.quarkus.test.common.QuarkusTestResourceLifecycleManager`. By simply annotating any test in the test suite with `@QuarkusTestResource`, Quarkus will run the corresponding `QuarkusTestResourceLifecycleManager` before any tests are run. -A test suite is also free to utilize multiple `@QuarkusTestResource` annotations, in which case all the corresponding `QuarkusTestResourceLifecycleManager` objects will be run before the tests. +A test suite is also free to utilize multiple `@QuarkusTestResource` annotations, in which case all the corresponding `QuarkusTestResourceLifecycleManager` objects will be run before the tests. When using multiple test resources they can be started concurrently. For that you need to set `@QuarkusTestResource(parallel = true)`. + +NOTE: test resources are global, even if they are defined on a test class or custom profile, which means they will all be activated for all tests, even though we do +remove duplicates. If you want to only enable a test resource on a single test class or test profile, you can use `@QuarkusTestResource(restrictToAnnotatedClass = true)`. Quarkus provides a few implementations of `QuarkusTestResourceLifecycleManager` out of the box (see `io.quarkus.test.h2.H2DatabaseTestResource` which starts an H2 database, or `io.quarkus.test.kubernetes.client.KubernetesMockServerTestResource` which starts a mock Kubernetes API server), but it is common to create custom implementations to address specific application needs. 
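A custom implementation is just a class implementing `start()` and `stop()`; the following minimal sketch is purely illustrative (the service it pretends to start and the `my-service.url` property are made up for the example):

[source,java]
----
package org.acme.getting.started.testing;

import java.util.Collections;
import java.util.Map;

import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;

public class MyExternalServiceTestResource implements QuarkusTestResourceLifecycleManager {

    @Override
    public Map<String, String> start() {
        // Start the external service here (container, embedded server, ...)
        // and expose its coordinates to the application as configuration overrides.
        return Collections.singletonMap("my-service.url", "http://localhost:8085");
    }

    @Override
    public void stop() {
        // Shut the service down and clean up.
    }
}
----

Such a class is then enabled by annotating a test with `@QuarkusTestResource(MyExternalServiceTestResource.class)`.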
-Common cases include starting docker containers using https://www.testcontainers.org/[Testcontainers] (an example of which can be found https://github.com/quarkusio/quarkus-quickstarts/blob/master/kafka-quickstart/src/test/java/org/acme/kafka/KafkaResource.java[here]), -or starting a mock HTTP server using http://wiremock.org/[Wiremock] (an example of which can be found https://github.com/geoand/quarkus-test-demo/blob/master/src/test/java/org/acme/getting/started/country/WiremockCountries.java[here]). +Common cases include starting docker containers using https://www.testcontainers.org/[Testcontainers] (an example of which can be found https://github.com/quarkusio/quarkus-quickstarts/blob/main/kafka-quickstart/src/test/java/org/acme/kafka/KafkaResource.java[here]), +or starting a mock HTTP server using http://wiremock.org/[Wiremock] (an example of which can be found https://github.com/geoand/quarkus-test-demo/blob/main/src/test/java/org/acme/getting/started/country/WiremockCountries.java[here]). + +=== Annotation-based test resources + +It is possible to write test resources that are enabled and configured using annotations. This is enabled by placing the `@QuarkusTestResource` +on an annotation which will be used to enable and configure the test resource. + +For example, this defines the `@WithKubernetesTestServer` annotation, which you can use on your tests to activate the `KubernetesServerTestResource`, +but only for the annotated test class. You can also place them on your `QuarkusTestProfile` test profiles. + +[source,java] +---- +@QuarkusTestResource(KubernetesServerTestResource.class) +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +public @interface WithKubernetesTestServer { + /** + * Start it with HTTPS + */ + boolean https() default false; + + /** + * Start it in CRUD mode + */ + boolean crud() default true; + + /** + * Port to use, defaults to any available port + */ + int port() default 0; +} +---- + +The `KubernetesServerTestResource` class has to implement the +`QuarkusTestResourceConfigurableLifecycleManager` interface in order to be configured using the previous annotation: + +[source,java] +---- +public class KubernetesServerTestResource + implements QuarkusTestResourceConfigurableLifecycleManager { + + private boolean https = false; + private boolean crud = true; + private int port = 0; + + @Override + public void init(WithKubernetesTestServer annotation) { + this.https = annotation.https(); + this.crud = annotation.crud(); + this.port = annotation.port(); + } + + // ... +} +---- + +== Hang Detection + +`@QuarkusTest` has support for hang detection to help diagnose any unexpected hangs. If no progress is made for a specified +time (i.e. no JUnit callbacks are invoked) then Quarkus will print a stack trace to the console to help diagnose the hang. +The default value for this timeout is 10 minutes. + +No further action will be taken, and the tests will continue as normal (generally until CI times out), however the printed +stack traces should help diagnose why the build has failed. You can control this timeout with the +`quarkus.test.hang-detection-timeout` system property (you can also set this in application.properties, but this won't +be read until Quarkus has started, so the timeout for Quarkus start will be the default of 10 minutes). == Native Executable Testing @@ -760,6 +1067,23 @@ guide except injecting into tests (and the native executable runs in a separate This is covered in the link:building-native-image[Native Executable Guide]. 
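As a quick reference, a native image test is usually an `*IT` subclass of an existing `@QuarkusTest` class that re-runs the same assertions against the native executable; the class names below are illustrative and assume a JVM-mode `GreetingResourceTest` like the one shown earlier:

[source,java]
----
package org.acme.getting.started.testing;

import io.quarkus.test.junit.NativeImageTest;

// Re-runs the tests defined in GreetingResourceTest against the native executable.
@NativeImageTest
public class NativeGreetingResourceIT extends GreetingResourceTest {

    // Execute the same tests, but in native mode.
}
----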
+[WARNING] +==== +Although `@NativeImageTest` is not yet deprecated, it will be in the future as its functionality is covered by `@QuarkusIntegrationTest` +which is described in the following section. +==== + +[#quarkus-integration-test] +== Using @QuarkusIntegrationTest + +`@QuarkusIntegrationTest` should be used to launch and test the artifact produced by the Quarkus build, and supports testing a jar (of whichever type), a native image or container-image. +Put simply, this means that if the result of a Quarkus build (`mvn package` or `gradle build`) is a jar, that jar will be launched as `java -jar ...` and tests run against it. +If instead a native image was built, then the application is launched as `./application ...` and again the tests run against the running application. +Finally, if a container image was created during the build (by including the `quarkus-container-image-jib` or `quarkus-container-image-docker` extensions and having the +`quarkus.container-image.build=true` property configured), then a container is created and run (this requires the `docker` executable being present). + +As is the case with `@NativeImageTest`, this is a black box test that supports the same set of features and has the same limitations. + [[test-from-ide]] == Running `@QuarkusTest` from an IDE Most IDEs offer the possibility to run a selected class as JUnit test directly. * `java.util.logging.manager` (see link:logging[Logging Guide]) -* `maven.home` (only if there are any custom settings in `${maven.home}/conf/settings.mxl`, see link:maven-tooling[Maven Guide]) +* `maven.home` (only if there are any custom settings in `${maven.home}/conf/settings.xml`, see link:maven-tooling[Maven Guide]) * `maven.settings` (in case a custom version of `settings.xml` file should be used for the tests) diff --git a/docs/src/main/asciidoc/getting-started.adoc b/docs/src/main/asciidoc/getting-started.adoc index 377816b396422..3a1e60df3d615 100644 --- a/docs/src/main/asciidoc/getting-started.adoc +++ b/docs/src/main/asciidoc/getting-started.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Creating Your First Application @@ -56,18 +56,18 @@ This guide also covers the testing of the endpoint. == Solution -We recommend that you follow the instructions from <> and onwards to create the application step by step. +We recommend that you follow the instructions from <> and onwards to create the application step by step. However, you can go right to the completed example. Download an {quickstarts-archive-url}[archive] or clone the git repository: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- git clone {quickstarts-clone-url} ---- -The solution is located in the `getting-started` directory. +The solution is located in the `getting-started` {quickstarts-tree-url}/getting-started[directory].
== Bootstrapping the project @@ -75,7 +75,7 @@ The easiest way to create a new Quarkus project is to open a terminal and run th For Linux & MacOS users -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ @@ -87,16 +87,16 @@ cd getting-started For Windows users -- If using cmd , (don't use forward slash `\`) +- If using cmd , (don't use backward slash `\`) -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create -DprojectGroupId=org.acme -DprojectArtifactId=getting-started -DclassName="org.acme.getting.started.GreetingResource" -Dpath="/hello" ---- - If using Powershell , wrap `-D` parameters in double quotes -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create "-DprojectGroupId=org.acme" "-DprojectArtifactId=getting-started" "-DclassName=org.acme.getting.started.GreetingResource" "-Dpath=/hello" ---- @@ -120,8 +120,8 @@ In addition, you can see the `quarkus-maven-plugin` responsible of the packaging io.quarkus - quarkus-bom - ${quarkus.version} + quarkus-universe-bom + ${quarkus.platform.version} pom import @@ -133,11 +133,14 @@ In addition, you can see the `quarkus-maven-plugin` responsible of the packaging io.quarkus quarkus-maven-plugin - ${quarkus.version} + ${quarkus-plugin.version} + true build + generate-code + generate-code-tests @@ -198,7 +201,7 @@ Use: `./mvnw compile quarkus:dev`: ---- $ ./mvnw compile quarkus:dev [INFO] --------------------< org.acme:getting-started >--------------------- -[INFO] Building getting-started 1.0-SNAPSHOT +[INFO] Building getting-started 1.0.0-SNAPSHOT [INFO] --------------------------------[ jar ]--------------------------------- [INFO] [INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ getting-started --- @@ -219,10 +222,11 @@ Listening for transport dt_socket at address: 5005 Once started, you can request the provided endpoint: -``` +[source,shell] +---- $ curl -w "\n" http://localhost:8080/hello hello -``` +---- Hit `CTRL+C` to stop the application, or keep it running and enjoy the blazing fast hot-reload. @@ -235,7 +239,9 @@ We are using `curl -w "\n"` in this example to avoid your terminal printing a '% == Using injection Dependency injection in Quarkus is based on ArC which is a CDI-based dependency injection solution tailored for Quarkus' architecture. -You can learn more about it in the link:cdi-reference[Contexts and Dependency Injection guide]. +If you're new to CDI then we recommend you to read the link:cdi[Introduction to CDI] guide. + +Quarkus only implements a subset of the CDI features and comes with non-standard features and specific APIS, you can learn more about it in the link:cdi-reference[Contexts and Dependency Injection guide]. ArC comes as a dependency of `quarkus-resteasy` so you already have it handy. @@ -404,7 +410,7 @@ These tests use http://rest-assured.io/[RestAssured], but feel free to use your You can run these using Maven: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw test ---- @@ -429,7 +435,7 @@ property called `test.url` that is set to the base test URL for situations where == Working with multi-module project or external modules -Quarkus heavily utilizes https://github.com/wildfly/jandex[Jandex] at build time, to discover various classes or annotations. 
One immediately recongizable application of this, is CDI bean discovery. +Quarkus heavily utilizes https://github.com/wildfly/jandex[Jandex] at build time, to discover various classes or annotations. One immediately recognizable application of this is CDI bean discovery. As a result, most of the Quarkus extensions will not work properly if this build time discovery isn't properly setup. This index is created by default on the project on which Quarkus is configured for, thanks to our Maven and Gradle plugins. @@ -446,20 +452,35 @@ Be sure to read the link:cdi-reference#bean_discovery[Bean Discovery] section of == Packaging and run the application The application is packaged using `./mvnw package`. -It produces 2 jar files in `/target`: +It produces several outputs in `/target`: -* `getting-started-1.0-SNAPSHOT.jar` - containing just the classes and resources of the projects, it's the regular -artifact produced by the Maven build; -* `getting-started-1.0-SNAPSHOT-runner.jar` - being an executable _jar_. Be aware that it's not an _über-jar_ as -the dependencies are copied into the `target/lib` directory. +* `getting-started-1.0.0-SNAPSHOT.jar` - containing just the classes and resources of the project, it's the regular +artifact produced by the Maven build - it is *not* the runnable jar; +* the `quarkus-app` directory which contains the `quarkus-run.jar` jar file - being an executable _jar_. Be aware that it's not an _über-jar_ as +the dependencies are copied into subdirectories of `quarkus-app/lib/`. -You can run the application using: `java -jar target/getting-started-1.0-SNAPSHOT-runner.jar` +You can run the application using: `java -jar target/quarkus-app/quarkus-run.jar` -NOTE: The `Class-Path` entry of the `MANIFEST.MF` from the _runner jar_ explicitly lists the jars from the `lib` directory. -So if you want to deploy your application somewhere, you need to copy the _runner_ jar as well as the _lib_ directory. +NOTE: If you want to deploy your application somewhere (typically in a container), you need to deploy the whole `quarkus-app` directory. NOTE: Before running the application, don't forget to stop the hot reload mode (hit `CTRL+C`), or you will have a port conflict. +=== Using fast-jar + +The previous section assumes that the configuration property `quarkus.package.type` has either not been configured explicitly, +or that it has been set to `legacy-jar` (in `application.properties` or any of the other supported configuration sources). +When the property has been set to `fast-jar` (which will become the default in Quarkus 1.12), then the result of executing +`./mvnw package` is a new directory under `target` named `quarkus-app`. + +You can run the application using: `java -jar target/quarkus-app/quarkus-run.jar`. + +WARNING: In order to successfully run the produced jar, you need to have the entire contents of the `quarkus-app` directory. If any of the files are missing, the application will not start or +might not function correctly. + +TIP: The `fast-jar` packaging results in creating an artifact that starts a little faster and consumes slightly less memory than a legacy Quarkus jar +because it has indexed information about which dependency jar contains classes and resources. It can thus avoid the lookup into potentially every jar +on the classpath that the legacy jar necessitates, when loading a class or resource.
+ [#banner] == Configuring the banner diff --git a/docs/src/main/asciidoc/gradle-config.adoc b/docs/src/main/asciidoc/gradle-config.adoc index f82b083dd34dc..fc3cb980093b5 100644 --- a/docs/src/main/asciidoc/gradle-config.adoc +++ b/docs/src/main/asciidoc/gradle-config.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Gradle Plugin Repositories diff --git a/docs/src/main/asciidoc/gradle-tooling.adoc b/docs/src/main/asciidoc/gradle-tooling.adoc index 891c91192cc96..eb2f0361e6aad 100644 --- a/docs/src/main/asciidoc/gradle-tooling.adoc +++ b/docs/src/main/asciidoc/gradle-tooling.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Building Quarkus apps with Gradle @@ -9,7 +9,7 @@ include::./attributes.adoc[] CAUTION: Quarkus Gradle support is considered preview. You can use Gradle to create Quarkus projects as outlined in our guides. If you go beyond there will be cases where -the Gradle tasks https://github.com/quarkusio/quarkus/issues/5101[does not behave as expected]. +the Gradle tasks https://github.com/quarkusio/quarkus/issues/10189[does not behave as expected]. This is just a caution, and we recommend if you like Gradle you try it out and give us feedback. [[project-creation]] @@ -17,14 +17,14 @@ This is just a caution, and we recommend if you like Gradle you try it out and g The easiest way to scaffold a Gradle project, is currently to use the Quarkus Maven plugin like so: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=my-groupId \ -DprojectArtifactId=my-artifactId \ -DprojectVersion=my-version \ -DclassName="org.my.group.MyResource" \ - -Dextensions="resteasy-jsonb" \ + -Dextensions="resteasy,resteasy-jackson" \ -DbuildTool=gradle ---- @@ -55,7 +55,7 @@ The following table lists the attributes you can pass to the `create` command: | The artifact id of the created project. Not passing it triggers the interactive mode. | `projectVersion` -| `1.0-SNAPSHOT` +| `1.0.0-SNAPSHOT` | The version of the created project | `className` @@ -120,14 +120,14 @@ It is not possible to use a custom test configuration profile in native mode for From inside a Quarkus project, you can obtain a list of the available extensions with: -[source,shell] +[source,bash] ---- ./gradlew listExtensions ---- You can enable an extension using: -[source,shell] +[source,bash] ---- ./gradlew addExtension --extensions="hibernate-validator" ---- @@ -141,7 +141,7 @@ If no extension is found or if more than one extensions match, you will see a re [source,shell] ---- -./gradlew addExtension --extensions="jdbc,agroal,non-exist-ent" +$ ./gradlew addExtension --extensions="jdbc,agroal,non-exist-ent" [...] 
❌ Multiple extensions matching 'jdbc' * io.quarkus:quarkus-jdbc-h2 @@ -155,7 +155,7 @@ If no extension is found or if more than one extensions match, you will see a re You can install all extensions which match a globbing pattern: -[source,shell] +[source,bash] ---- ./gradlew addExtension --extensions="hibernate*" ---- @@ -165,7 +165,7 @@ You can install all extensions which match a globbing pattern: Quarkus comes with a built-in development mode. Run your application with: -[source,shell] +[source,bash] ---- ./gradlew quarkusDev ---- @@ -197,6 +197,25 @@ quarkusDev { By default, the `quarkusDev` task uses `compileJava` compiler options. These can be overridden by setting the `compilerArgs` property in the task. ==== +[NOTE] +==== +By default, `quarkusDev` sets the debug host to `localhost` (for security reasons). If you need to change this, for example to enable debugging on all hosts, you can use the `-DdebugHost` option like so: + +[source,bash] +---- +./gradlew quarkusDev -DdebugHost=0.0.0.0 +---- +==== +The plugin also exposes a `quarkusDev` configuration. Using this configuration to declare a dependency will restrict the usage of that dependency to development mode. +The `quarkusDev` configuration can be used as following: + +[source,groovy] +---- +dependencies { + quarkusDev 'io.quarkus:quarkus-jdbc-h2' +} +---- + === Remote Development Mode It is possible to use development mode remotely, so that you can run Quarkus in a container environment (such as OpenShift) @@ -206,24 +225,42 @@ This allows you to develop in the same environment you will actually run your ap WARNING: Do not use this in production. This should only be used in a development environment. You should not run production applications in dev mode. -To do this you must have the `quarkus-undertow-websockets` extension installed: +To do this you must build a mutable application, using the `mutable-jar` format. Set the following properties in `application.properties`: -[source,shell] +[source,properties] ---- -./gradlew addExtension --extensions="undertow-websockets" +quarkus.package.type=mutable-jar <1> +quarkus.live-reload.password=changeit <2> +quarkus.live-reload.url=http://my.cluster.host.com:8080 <3> ---- +<1> This tells Quarkus to use the mutable-jar format. Mutable applications also include the deployment time parts of Quarkus, +so they take up a bit more disk space. If run normally they start just as fast and use the same memory as an immutable application, +however they can also be started in dev mode. +<2> The password that is used to secure communication between the remote side and the local side. +<3> The URL that your app is going to be running in dev mode at. This is only needed on the local side, so you +may want to leave it out of the properties file and specify it as a system property on the command line. -You must also have the following config properties set: +The `mutable-jar` is then built in the same way that a regular Quarkus jar is built, i.e. by issuing: -- `quarkus.live-reload.password` -- `quarkus.live-reload.url` +[source,bash] +---- +./gradlew build +---- -These can be set via `application.properties`, or any other way (e.g. system properties, environment vars etc). The -password must be set on both the local and remote processes, while the url only needs to be set on the local host. +Before you start Quarkus on the remote host set the environment variable `QUARKUS_LAUNCH_DEVMODE=true`. 
If you are +on bare metal you can set it via the `export QUARKUS_LAUNCH_DEVMODE=true` command and then run the application with the proper `java -jar ...` command to run the application. -Start Quarkus in dev mode on the remote host. Now you need to connect your local agent to the remote host: +If you plan on running the application via Docker, then you'll need to add `-e QUARKUS_LAUNCH_DEVMODE=true` to the `docker run` command. +When the application starts you should now see the following line in the logs: `Profile dev activated. Live Coding activated`. -[source,shell] + +NOTE: The remote side does not need to include Maven or any other development tools. The normal `fast-jar` Dockerfile +that is generated with a new Quarkus application is all you need. If you are using bare metal launch the Quarkus runner +jar, do not attempt to run normal devmode. + +Now you need to connect your local agent to the remote host, using the `remote-dev` command: + +[source,bash] ---- ./gradlew quarkusRemoteDev -Dquarkus.live-reload.url=http://my-remote-host:8080 ---- @@ -231,6 +268,10 @@ Start Quarkus in dev mode on the remote host. Now you need to connect your local Now every time you refresh the browser you should see any changes you have made locally immediately visible in the remote app. +All the config options are shown below: + +include::{generated-dir}/config/quarkus-live-reload-live-reload-config.adoc[opts=optional, leveloffset=+1] + == Debugging In development mode, Quarkus starts by default with debug mode enabled, listening to port `5005` without suspending the JVM. @@ -298,7 +339,7 @@ Open the project directory in VS Code. If you have installed the Java Extension Native executables make Quarkus applications ideal for containers and serverless workloads. -Make sure to have `GRAALVM_HOME` configured and pointing to GraalVM version {graalvm-version}. +Make sure to have `GRAALVM_HOME` configured and pointing to GraalVM version {graalvm-version} (Make sure to use a Java 11 version of GraalVM). Create a native executable using: `./gradlew build -Dquarkus.package.type=native`. A native executable will be present in `build/`. @@ -322,16 +363,35 @@ quarkusBuild { } ---- +or if you are using the Gradle Kotlin DSL: + +[source,kotlin,subs=attributes+] +---- +tasks.quarkusBuild { + nativeArgs { + "container-build" to true <1> + "buildImage" to "quay.io/quarkus/ubi-quarkus-native-image:{graalvm-flavor}" <2> + "quarkus.native.java-home" to "/opt/java-11/" + } +} +---- + <1> Set `quarkus.native.containerBuild` property to `true` <2> Set `quarkus.native.buildImage` property to `quay.io/quarkus/ubi-quarkus-native-image:{graalvm-flavor}` +[WARNING] +==== +When using the Gradle Groovy DSL, property keys must follow lower camel case notation. +e.g. `container-build` is not valid, and should be replaced by `containerBuild`. +This limitation does not apply to the Gradle Kotlin DSL. +==== === Build a container friendly executable The native executable will be specific to your operating system. To create an executable that will run in a container, use the following: -[source,shell] +[source,bash] ---- ./gradlew build -Dquarkus.package.type=native -Dquarkus.native.container-build=true ---- @@ -355,27 +415,55 @@ Be aware that a given Quarkus version might not be compatible with all the image Run the native tests using: -[source,shell] +[source,bash] ---- ./gradlew testNative ---- This task depends on `quarkusBuild`, so it will generate the native image before running the tests. 
+[NOTE] +==== +By default, the `native-test` source set is based on `main` and `test` source sets. It is possible to add an extra source set. For example, if your integration tests are located in an `integrationTest` source set, you can specify it as: + +[source,groovy] +---- +quarkus { + sourceSets { + extraNativeTest = sourceSets.integrationTest + } +} +---- + +==== + +== Using fast-jar + +`fast-jar` is now the default Quarkus package type. The result of the `./gradlew build` command is a new directory under `build` named `quarkus-app`. + +You can run the application using: `java -jar build/quarkus-app/quarkus-run.jar`. + +WARNING: In order to successfully run the produced jar, you need to have the entire contents of the `quarkus-app` directory. If any of the files are missing, the application will not start or +might not function correctly. + +TIP: The `fast-jar` packaging results in creating an artifact that starts a little faster and consumes slightly less memory than a legacy Quarkus jar +because it has indexed information about which dependency jar contains classes and resources. It can thus avoid the lookup into potentially every jar +on the classpath that the legacy jar necessitates, when loading a class or resource. + == Building Uber-Jars -Quarkus Gradle plugin supports the generation of Uber-Jars by specifying an `--uber-jar` argument as follows: +Quarkus Gradle plugin supports the generation of Uber-Jars by specifying a `quarkus.package.type` argument as follows: -[source,shell] +[source,bash] ---- -./gradlew quarkusBuild --uber-jar +./gradlew quarkusBuild -Dquarkus.package.type=uber-jar ---- When building an Uber-Jar you can specify entries that you want to exclude from the generated jar by using the `--ignored-entry` argument: -[source,shell] +[source,bash] ---- -./gradlew quarkusBuild --uber-jar --ignored-entry=META-INF/file1.txt +./gradlew quarkusBuild -Dquarkus.package.type=uber-jar --ignored-entry=META-INF/file1.txt ---- The entries are relative to the root of the generated Uber-Jar. You can specify multiple entries by adding extra `--ignored-entry` arguments. @@ -388,7 +476,7 @@ By default, Quarkus will not discover CDI beans inside another module. The best way to enable CDI bean discovery for a module in a multi-module project would be to include a `META-INF/beans.xml` file, unless it is the main application module already configured with the quarkus-maven-plugin, in which case it will be indexed automatically. -Alternatively, there is some unofficial link:https://plugins.gradle.org/search?term=jandex[Gradle Jandex plugins]) that can be used instead of the `META-INF/beans.xml` file. +Alternatively, there are some unofficial link:https://plugins.gradle.org/search?term=jandex[Gradle Jandex plugins] that can be used instead of the `META-INF/beans.xml` file. More information on this topic can be found on the link:cdi-reference#bean_discovery[Bean Discovery] section of the CDI guide.
diff --git a/docs/src/main/asciidoc/grpc-getting-started.adoc b/docs/src/main/asciidoc/grpc-getting-started.adoc index ae0ed1717d720..e19de7e4cea90 100644 --- a/docs/src/main/asciidoc/grpc-getting-started.adoc +++ b/docs/src/main/asciidoc/grpc-getting-started.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Getting Started with gRPC @@ -25,7 +25,9 @@ Edit the `pom.xml` file to add the Quarkus gRPC extension dependency (just under ---- -Make sure you have `prepare` goal of `quarkus-maven-plugin` enabled in your `pom.xml`: +Make sure you have `generate-code` goal of `quarkus-maven-plugin` enabled in your `pom.xml`. +If you wish to generate code from different `proto` files for tests, also add the `generate-code-tests` goal. +Please note that no additional task/goal is required for the Gradle plugin. [source,xml] ---- @@ -34,11 +36,14 @@ Make sure you have `prepare` goal of `quarkus-maven-plugin` enabled in your `pom io.quarkus quarkus-maven-plugin + ${quarkus-plugin.version} + true - prepare build + generate-code + generate-code-tests @@ -49,12 +54,17 @@ Make sure you have `prepare` goal of `quarkus-maven-plugin` enabled in your `pom With this configuration, you can put your service and message definitions in the `src/main/proto` directory. `quarkus-maven-plugin` will generate Java files from your `proto` files. -Alternatively to using the `prepare` goal of the `quarkus-maven-plugin`, you can use `protobuf-maven-plugin` to generate these files, more in <> + +`quarkus-maven-plugin` retrieves a version of `protoc` (the protobuf compiler) from Maven repositories. The retrieved version matches your operating system and CPU architecture. +If this retrieved version does not work in your context, you can download the suitable binary and specify the location via +`-Dquarkus.grpc.protoc-path=/path/to/protoc`. + +Alternatively to using the `generate-code` goal of the `quarkus-maven-plugin`, you can use `protobuf-maven-plugin` to generate these files, more in <> Let's start with a simple _Hello_ service. Create the `src/main/proto/helloworld.proto` file with the following content: -[source] +[source,javascript] ---- syntax = "proto3"; @@ -86,36 +96,49 @@ This `proto` file defines a simple service interface with a single method (`SayH Before coding, we need to generate the classes used to implement and consume gRPC services. In a terminal, run: -[source, bash] +[source,shell] ---- $ mvn compile ---- -Once generated, you can look at the `target/generated-sources/protobuf` directory: +Once generated, you can look at the `target/generated-sources/grpc` directory: -[source, txt] +[source,txt] ---- -target/generated-sources/protobuf -├── grpc-java -│ └── io -│ └── quarkus -│ └── example -│ └── GreeterGrpc.java -└── java - └── io - └── quarkus - └── example - ├── HelloReply.java - ├── HelloReplyOrBuilder.java - ├── HelloRequest.java - ├── HelloRequestOrBuilder.java - ├── HelloWorldProto.java - └── MutinyGreeterGrpc.java +target/generated-sources/grpc +└── io + └── quarkus + └── example + ├── GreeterGrpc.java + ├── HelloReply.java + ├── HelloReplyOrBuilder.java + ├── HelloRequest.java + ├── HelloRequestOrBuilder.java + ├── HelloWorldProto.java + └── MutinyGreeterGrpc.java ---- These are the classes we are going to use. 
-IMPORTANT: Every time you update the `proto` files, you need to re-generate the classes (using `mvn compile`). + +=== `proto` files with imports + +Protocol Buffers specification provides a way to import `proto` files. +The Quarkus code generation mechanism lets you control the scope of dependencies to scan for possible imports by setting the `quarkus.generate-code.grpc.scan-for-imports` property to one of the following: + +- `all` - scan all the dependencies +- `none` - don't scan the dependencies, use only what is defined in the `src/main/proto` or `src/test/proto` +- `groupId1:artifactId1,groupId2:artifactId2` - scan only the dependencies with group id and artifact id in the list. + +If not specified, the property is set to `com.google.protobuf:protobuf-java`. +To override it, set the `quarkus.generate-code.grpc.scan-for-imports` property in your pom.xml (or gradle.properties) to the desired value, e.g. + +[source,xml] +---- + + all + +---- == Implementing a gRPC service @@ -129,7 +152,7 @@ IMPORTANT: Don't use `@ApplicationScoped` as the gRPC service implementation can Create the `src/main/java/org/acme/HelloService.java` file with the following content: -[source, java] +[source,java] ---- package org.acme; @@ -153,11 +176,11 @@ public class HelloService extends GreeterGrpc.GreeterImplBase { } ---- -1. Expose your implementation as bean +1. Expose your implementation as bean. 2. Extends the `ImplBase` class. This is a generated class. -3. Implement the methods defined in the service definition (here we have a single method) -4. Build and send the response -5. Close the response +3. Implement the methods defined in the service definition (here we have a single method). +4. Build and send the response. +5. Close the response. Quarkus also provides an additional model with Mutiny, a Reactive Programming API integrated in Quarkus. Learn more about Mutiny on the link:getting-started-reactive#mutiny[Getting Started with Reactive guide]. @@ -191,6 +214,11 @@ The main differences are the following: * it extends the `ImplBase` from `MutinyGreeterGrpc` instead of `GreeterGrpc` * the signature of the method is using Mutiny types +NOTE: If your service implementation logic is blocking (use blocking I/O for example), annotate your method with +`@Blocking`. +The `io.smallrye.common.annotation.Blocking` annotation instructs the framework to invoke the +annotated method on a worker thread instead of the I/O thread (event-loop). + === The gRPC server The services are _served_ by a _server_. @@ -243,20 +271,20 @@ public class ExampleResource { } ---- -1. Inject the service and configure its name.This name is used in the application configuration -2. Use the _blocking_ stub (also a generated class) -3. Invoke the service +1. Inject the service and configure its name. This name is used in the application configuration. +2. Use the _blocking_ stub (also a generated class). +3. Invoke the service. -We need to configure the application to indicate where is the `hello` service. +We need to configure the application to indicate where the `hello` service is found. In the `src/main/resources/application.properties` file, add the following property: -[source,text] +[source,properties] ---- quarkus.grpc.clients.hello.host=localhost ---- -`hello` is the name of the service used in the `@GrpcService` annotation -`host` configures the service host (here it's localhost). +- `hello` is the name of the service used in the `@GrpcService` annotation. +- `host` configures the service host (here it's localhost). 
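To make the callouts above concrete, here is a rough sketch of such a resource. The guide's full listing is not reproduced in this diff, so the class name, the JAX-RS path and the import for the `@GrpcService` qualifier (provided by the quarkus-grpc extension) are assumptions; the generated types come from the `helloworld.proto` file shown earlier.

[source,java]
----
package org.acme;

import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;

import io.quarkus.example.GreeterGrpc;
import io.quarkus.example.HelloRequest;

@Path("/hello")
public class ExampleResource {

    @Inject
    @GrpcService("hello")                   // name must match the quarkus.grpc.clients.hello.* configuration
    GreeterGrpc.GreeterBlockingStub client; // blocking stub generated from helloworld.proto

    @GET
    @Path("/{name}")
    public String hello(@PathParam("name") String name) {
        // Build the request message and invoke the service synchronously
        return client.sayHello(HelloRequest.newBuilder().setName(name).build()).getMessage();
    }
}
----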
Then, open http://localhost:8080/hello/quarkus in a browser, and you should get `Hello quarkus`! @@ -340,3 +368,5 @@ Then, add to the `build` section the `os-maven-plugin` extension and the `protob That's why we use the `os-maven-plugin` to target the executable compatible with the operating system. NOTE: This configuration instructs the `protobuf-maven-plugin` to generate the default gRPC classes and classes using Mutiny to fit with the Quarkus development experience. + +IMPORTANT: When using `protobuf-maven-plugin`, instead of the `quarkus-maven-plugin`, every time you update the `proto` files, you need to re-generate the classes (using `mvn compile`). diff --git a/docs/src/main/asciidoc/grpc-service-consumption.adoc b/docs/src/main/asciidoc/grpc-service-consumption.adoc index 6669fda56a54a..d5180ffa09619 100644 --- a/docs/src/main/asciidoc/grpc-service-consumption.adoc +++ b/docs/src/main/asciidoc/grpc-service-consumption.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Consuming a gRPC Service @@ -160,12 +160,16 @@ Don't forget to replace it with the name you used in in the `@GrpcService` annot === Enabling TLS -To enable TLS, use the following configuration: +To enable TLS, use the following configuration. +Note that all paths in the configuration may either specify a resource on the classpath +(typically from `src/main/resources` or its subfolder) or an external file. -[source] +[source,properties] ---- quarkus.grpc.clients.hello.host=localhost -quarkus.grpc.clients.hello.ssl.trust-store=src/main/resources/tls/ca.pem + +# either a path to a classpath resource or to a file: +quarkus.grpc.clients.hello.ssl.trust-store=tls/ca.pem ---- NOTE: When SSL/TLS is configured, `plain-text` is automatically disabled. @@ -174,11 +178,56 @@ NOTE: When SSL/TLS is configured, `plain-text` is automatically disabled. To use TLS with mutual authentication, use the following configuration: -[source] +[source,properties] ---- quarkus.grpc.clients.hello.host=localhost quarkus.grpc.clients.hello.plain-text=false -quarkus.grpc.clients.hello.ssl.certificate=src/main/resources/tls/client.pem -quarkus.grpc.clients.hello.ssl.key=src/main/resources/tls/client.key -quarkus.grpc.clients.hello.ssl.trust-store=src/main/resources/tls/ca.pem + +# all the following may use either a path to a classpath resource or to a file: +quarkus.grpc.clients.hello.ssl.certificate=tls/client.pem +quarkus.grpc.clients.hello.ssl.key=tls/client.key +quarkus.grpc.clients.hello.ssl.trust-store=tls/ca.pem +---- + +== Client Interceptors + +You can implement a gRPC client interceptor by implementing an `@ApplicationScoped` bean implementing `io.grpc.ClientInterceptor`: + +[source, java] +---- +@ApplicationScoped +public class MyInterceptor implements ClientInterceptor { + + @Override + public ClientCall interceptCall(MethodDescriptor method, + CallOptions callOptions, Channel next) { + // ... + } +} ---- + +TIP: Check the https://grpc.github.io/grpc-java/javadoc/io/grpc/ClientInterceptor.html[ClientInterceptor JavaDoc] to properly implement your interceptor. 
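As an illustration of what can go inside `interceptCall`, the sketch below uses plain grpc-java APIs to attach a header to every outgoing call; the header name and value are made up for the example.

[source,java]
----
import javax.enterprise.context.ApplicationScoped;

import io.grpc.CallOptions;
import io.grpc.Channel;
import io.grpc.ClientCall;
import io.grpc.ClientInterceptor;
import io.grpc.ForwardingClientCall.SimpleForwardingClientCall;
import io.grpc.Metadata;
import io.grpc.MethodDescriptor;

@ApplicationScoped
public class HeaderClientInterceptor implements ClientInterceptor {

    // Hypothetical header, only used to illustrate the mechanism
    private static final Metadata.Key<String> CUSTOM_HEADER =
            Metadata.Key.of("x-custom-header", Metadata.ASCII_STRING_MARSHALLER);

    @Override
    public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
            CallOptions callOptions, Channel next) {
        return new SimpleForwardingClientCall<ReqT, RespT>(next.newCall(method, callOptions)) {
            @Override
            public void start(Listener<RespT> responseListener, Metadata headers) {
                // Add the header before the call is actually started
                headers.put(CUSTOM_HEADER, "some-value");
                super.start(responseListener, headers);
            }
        };
    }
}
----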
+ +When you have multiple client interceptors, you can order them by implementing the `javax.enterprise.inject.spi.Prioritized` interface: + +[source, java] +---- +@ApplicationScoped +public class MyInterceptor implements ClientInterceptor, Prioritized { + + @Override + public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method, + CallOptions callOptions, Channel next) { + // ... + } + + @Override + public int getPriority() { + return 10; + } +} +---- + +Interceptors with the highest priority are called first. +The default priority, used if the interceptor does not implement the `Prioritized` interface, is `0`. + diff --git a/docs/src/main/asciidoc/grpc-service-implementation.adoc b/docs/src/main/asciidoc/grpc-service-implementation.adoc index 3b85b3b68ed15..8c1024d0f0003 100644 --- a/docs/src/main/asciidoc/grpc-service-implementation.adoc +++ b/docs/src/main/asciidoc/grpc-service-implementation.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Implementing a gRPC Service @@ -41,7 +41,7 @@ When extending them, you need to override the service methods defined in the ser To implement a gRPC service using the default gRPC API, create a class extending the default implementation base. Then, override the methods defined in the service interface. -Finally, expose the service as a CDI bean using the `@Singleton` annotation: +Finally, implement the service as a CDI bean using the `@Singleton` annotation: [source, java] ---- @@ -65,7 +65,7 @@ public class HelloService extends GreeterGrpc.GreeterImplBase { To implement a gRPC service using the Mutiny gRPC API, create a class extending the Mutiny implementation base. Then, override the methods defined in the service interface. These methods are using Mutiny types. -Finally, expose the service as a CDI bean using the `@Singleton` annotation: +Finally, implement the service as a CDI bean using the `@Singleton` annotation: [source, java] ---- @@ -83,6 +83,21 @@ public class ReactiveHelloService extends MutinyGreeterGrpc.GreeterImplBase { } ---- +== Blocking service implementation + +By default, all the methods from a gRPC service run on the event loop. +As a consequence, you must **not** block. +If your service logic must block, annotate the method with `io.smallrye.common.annotation.Blocking`: + +[source, java] +---- +@Override +@Blocking +public Uni<HelloReply> sayHelloBlocking(HelloRequest request) { + // Do something blocking before returning the Uni +} +---- + == Handling streams gRPC allows receiving and returning streams: @@ -135,7 +150,7 @@ public class StreamingService extends MutinyStreamingGrpc.StreamingImplBase { ---- == Health check -For the exposed services, Quarkus gRPC exposes health information in the following format: +For the implemented services, Quarkus gRPC exposes health information in the following format: [source,protobuf] ---- syntax = "proto3"; @@ -169,7 +184,7 @@ For more details, check out the https://github.com/grpc/grpc/blob/v1.28.1/doc/health-checking.md[gRPC documentation] Additionally, if Quarkus SmallRye Health is added to the application, a readiness check for -the state of the gRPC services will be added to the MicroProfile Health endpoint response, that is `/health`. +the state of the gRPC services will be added to the MicroProfile Health endpoint response, that is `/q/health`.
== Reflection Service @@ -177,7 +192,7 @@ Quarkus gRPC Server implements the https://github.com/grpc/grpc/blob/master/doc/ This service allows tools like https://github.com/fullstorydev/grpcurl[grpcurl] or https://github.com/gusaul/grpcox[grpcox] to interact with your services. The reflection service is enabled by default in _dev_ mode. -In test or production mode, you need to enable it explicitly by setting `quarkus.grpc-server.enable-reflection-service` to `true`. +In test or production mode, you need to enable it explicitly by setting `quarkus.grpc.server.enable-reflection-service` to `true`. == Scaling By default, quarkus-grpc starts a single gRPC server running on a single event loop. @@ -192,12 +207,15 @@ include::{generated-dir}/config/quarkus-grpc-config-group-config-grpc-server-con === Enabling TLS -To enable TLS, use the following configuration: +To enable TLS, use the following configuration. -[source] +Note that all paths in the configuration may either specify a resource on the classpath +(typically from `src/main/resources` or its subfolder) or an external file. + +[source,properties] ---- -quarkus.grpc-server.ssl.certificate=src/main/resources/tls/server.pem -quarkus.grpc-server.ssl.key=src/main/resources/tls/server.key +quarkus.grpc.server.ssl.certificate=tls/server.pem +quarkus.grpc.server.ssl.key=tls/server.key ---- NOTE: When SSL/TLS is configured, `plain-text` is automatically disabled. @@ -206,11 +224,54 @@ NOTE: When SSL/TLS is configured, `plain-text` is automatically disabled. To use TLS with mutual authentication, use the following configuration: -[source] +[source,properties] +---- +quarkus.grpc.server.ssl.certificate=tls/server.pem +quarkus.grpc.server.ssl.key=tls/server.key +quarkus.grpc.server.ssl.trust-store=tls/ca.jks +quarkus.grpc.server.ssl.trust-store-password=***** +quarkus.grpc.server.ssl.client-auth=REQUIRED +---- + +== Server Interceptors + +You can implement a gRPC server interceptor by implementing an `@ApplicationScoped` bean implementing `io.grpc.ServerInterceptor`: + +[source, java] +---- +@ApplicationScoped +public class MyInterceptor implements ServerInterceptor { + + @Override + public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> serverCall, + Metadata metadata, ServerCallHandler<ReqT, RespT> serverCallHandler) { + // ... + } +} ---- - -quarkus.grpc-server.ssl.certificate=src/main/resources/tls/server.pem -quarkus.grpc-server.ssl.key=src/main/resources/tls/server.key -quarkus.grpc-server.ssl.trust-store=src/main/resources/tls/ca.jks -quarkus.grpc-server.ssl.trust-store-password=***** -quarkus.grpc-server.ssl.client-auth=REQUIRED + +TIP: Check the https://grpc.github.io/grpc-java/javadoc/io/grpc/ServerInterceptor.html[ServerInterceptor JavaDoc] to properly implement your interceptor. + +When you have multiple server interceptors, you can order them by implementing the `javax.enterprise.inject.spi.Prioritized` interface: + +[source, java] +---- +@ApplicationScoped +public class MyInterceptor implements ServerInterceptor, Prioritized { + + @Override + public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> serverCall, + Metadata metadata, ServerCallHandler<ReqT, RespT> serverCallHandler) { + // ... + } + + @Override + public int getPriority() { + return 10; + } +} +---- + +Interceptors with the highest priority are called first. +The default priority, used if the interceptor does not implement the `Prioritized` interface, is `0`.
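As with the client side, here is a sketch of a complete `interceptCall` body for the server interceptor shown above, again using plain grpc-java APIs; the header name and the logging are purely illustrative.

[source,java]
----
import javax.enterprise.context.ApplicationScoped;

import io.grpc.Metadata;
import io.grpc.ServerCall;
import io.grpc.ServerCallHandler;
import io.grpc.ServerInterceptor;

@ApplicationScoped
public class HeaderServerInterceptor implements ServerInterceptor {

    // Hypothetical header, expected to be set by a matching client interceptor
    private static final Metadata.Key<String> CUSTOM_HEADER =
            Metadata.Key.of("x-custom-header", Metadata.ASCII_STRING_MARSHALLER);

    @Override
    public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> serverCall,
            Metadata metadata, ServerCallHandler<ReqT, RespT> serverCallHandler) {
        // Read the header (null if the client did not send it), then let the call proceed
        String value = metadata.get(CUSTOM_HEADER);
        System.out.println("x-custom-header = " + value);
        return serverCallHandler.startCall(serverCall, metadata);
    }
}
----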
+ diff --git a/docs/src/main/asciidoc/grpc.adoc b/docs/src/main/asciidoc/grpc.adoc index cbdb6de4e56a7..2e753f6d6a06b 100644 --- a/docs/src/main/asciidoc/grpc.adoc +++ b/docs/src/main/asciidoc/grpc.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus gRPC diff --git a/docs/src/main/asciidoc/hibernate-orm-panache-kotlin.adoc b/docs/src/main/asciidoc/hibernate-orm-panache-kotlin.adoc index 37911591cd285..d91d00d02cdb9 100644 --- a/docs/src/main/asciidoc/hibernate-orm-panache-kotlin.adoc +++ b/docs/src/main/asciidoc/hibernate-orm-panache-kotlin.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Simplified Hibernate ORM with Panache and Kotlin @@ -10,7 +10,7 @@ include::./attributes.adoc[] Hibernate ORM is the de facto standard JPA implementation and is well-known in the Java ecosystem. Panache offers a new layer atop this familiar framework. This guide will not dive in to the specifics of either as those are already -covered in the link:hibernate-orm-panache.adoc[Panache guide]. In this guide, we'll cover the Kotlin specific changes +covered in the link:hibernate-orm-panache[Panache guide]. In this guide, we'll cover the Kotlin specific changes needed to use Panache in your Kotlin-based Quarkus applications. == First: an example @@ -31,12 +31,12 @@ class Person: PanacheEntity { As you can see our entities remain simple. There is, however, a slight difference from the Java version. The Kotlin language doesn't support the notion of static methods in quite the same way as Java does. 
Instead, we must use a -[companion object](https://kotlinlang.org/docs/tutorials/kotlin-for-py/objects-and-companion-objects.html#companion-objects): +https://kotlinlang.org/docs/tutorials/kotlin-for-py/objects-and-companion-objects.html#companion-objects[companion object]: [source,kotlin] ---- @Entity -class Person : PanacheEntity { +class Person : PanacheEntity() { companion object: PanacheCompanion { // <1> fun findByName(name: String) = find("name", name).firstResult() fun findAlive() = list("status", Status.Alive) @@ -89,6 +89,7 @@ by making them implement `PanacheRepository`: [source,kotlin] ---- +@ApplicationScoped class PersonRepository: PanacheRepository { fun findByName(name: String) = find("name", name).firstResult() fun findAlive() = list("status", Status.Alive) diff --git a/docs/src/main/asciidoc/hibernate-orm-panache.adoc b/docs/src/main/asciidoc/hibernate-orm-panache.adoc index fa50fb508bd1f..91072d469ba16 100644 --- a/docs/src/main/asciidoc/hibernate-orm-panache.adoc +++ b/docs/src/main/asciidoc/hibernate-orm-panache.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Simplified Hibernate ORM with Panache @@ -341,9 +341,9 @@ Once you have written your repository, here are the most common operations you w ---- // creating a person Person person = new Person(); -person.name = "Stef"; -person.birth = LocalDate.of(1910, Month.FEBRUARY, 1); -person.status = Status.Alive; +person.setName("Stef"); +person.setBirth(LocalDate.of(1910, Month.FEBRUARY, 1)); +person.setStatus(Status.Alive); // persist it personRepository.persist(person); @@ -618,6 +618,43 @@ so the compiler must be configured to store parameter names inside the compiled This is enabled by default if you are using the Quarkus Maven archetype. If you are not using it, add the property `true` to your pom.xml. ==== +If in the DTO projection object you have a field from a referenced entity, you can use the `@ProjectedFieldName` annotation to provide the path for the SELECT statement. + +[source,java] +---- +@Entity +public class Dog extends PanacheEntity { + public String name; + public String race; + @ManyToOne + public Person owner; +} + +@RegisterForReflection +public class DogDto { + public String name; + public String ownerName; + + public DogDto(String name, @ProjectedFieldName("owner.name") String ownerName) { // <1> + this.name = name; + this.ownerName = ownerName; + } +} + +PanacheQuery query = Dog.findAll().project(DogDto.class); +---- + +1. The `ownerName` DTO constructor's parameter will be loaded from the `owner.name` HQL property. + +== Multiple Persistence Units + +The support for multiple persistence units is described in detail in link:hibernate-orm#multiple-persistence-units[the Hibernate ORM guide]. + +When using Panache, things are simple: + +* A given Panache entity can be attached to only a single persistence unit. +* Given that, Panache already provides the necessary plumbing to transparently find the appropriate `EntityManager` associated to a Panache entity. + == Transactions Make sure to wrap methods modifying your database (e.g. `entity.persist()`) within a transaction. 
Marking a @@ -776,11 +813,11 @@ public class PanacheFunctionalityTest { Assertions.assertEquals(0, Person.count()); // Now let's specify the return value - Mockito.when(Person.count()).thenReturn(23l); + Mockito.when(Person.count()).thenReturn(23L); Assertions.assertEquals(23, Person.count()); // Now let's change the return value - Mockito.when(Person.count()).thenReturn(42l); + Mockito.when(Person.count()).thenReturn(42L); Assertions.assertEquals(42, Person.count()); // Now let's call the original method @@ -792,13 +829,13 @@ public class PanacheFunctionalityTest { // Mock only with specific parameters Person p = new Person(); - Mockito.when(Person.findById(12l)).thenReturn(p); - Assertions.assertSame(p, Person.findById(12l)); - Assertions.assertNull(Person.findById(42l)); + Mockito.when(Person.findById(12L)).thenReturn(p); + Assertions.assertSame(p, Person.findById(12L)); + Assertions.assertNull(Person.findById(42L)); // Mock throwing - Mockito.when(Person.findById(12l)).thenThrow(new WebApplicationException()); - Assertions.assertThrows(WebApplicationException.class, () -> Person.findById(12l)); + Mockito.when(Person.findById(12L)).thenThrow(new WebApplicationException()); + Assertions.assertThrows(WebApplicationException.class, () -> Person.findById(12L)); // We can even mock your custom methods Mockito.when(Person.findOrdered()).thenReturn(Collections.emptyList()); @@ -895,11 +932,11 @@ public class PanacheFunctionalityTest { Assertions.assertEquals(0, personRepository.count()); // Now let's specify the return value - Mockito.when(personRepository.count()).thenReturn(23l); + Mockito.when(personRepository.count()).thenReturn(23L); Assertions.assertEquals(23, personRepository.count()); // Now let's change the return value - Mockito.when(personRepository.count()).thenReturn(42l); + Mockito.when(personRepository.count()).thenReturn(42L); Assertions.assertEquals(42, personRepository.count()); // Now let's call the original method @@ -911,13 +948,13 @@ public class PanacheFunctionalityTest { // Mock only with specific parameters Person p = new Person(); - Mockito.when(personRepository.findById(12l)).thenReturn(p); - Assertions.assertSame(p, personRepository.findById(12l)); - Assertions.assertNull(personRepository.findById(42l)); + Mockito.when(personRepository.findById(12L)).thenReturn(p); + Assertions.assertSame(p, personRepository.findById(12L)); + Assertions.assertNull(personRepository.findById(42L)); // Mock throwing - Mockito.when(personRepository.findById(12l)).thenThrow(new WebApplicationException()); - Assertions.assertThrows(WebApplicationException.class, () -> personRepository.findById(12l)); + Mockito.when(personRepository.findById(12L)).thenThrow(new WebApplicationException()); + Assertions.assertThrows(WebApplicationException.class, () -> personRepository.findById(12L)); Mockito.when(personRepository.findOrdered()).thenReturn(Collections.emptyList()); Assertions.assertTrue(personRepository.findOrdered().isEmpty()); @@ -1001,4 +1038,4 @@ yourself, or add the `quarkus-panache-common` as well, as shown below: ----- \ No newline at end of file +---- diff --git a/docs/src/main/asciidoc/hibernate-orm.adoc b/docs/src/main/asciidoc/hibernate-orm.adoc index 8b3a9427d0f57..efa82492f36e7 100644 --- a/docs/src/main/asciidoc/hibernate-orm.adoc +++ b/docs/src/main/asciidoc/hibernate-orm.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc 
+https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Hibernate ORM and JPA @@ -40,7 +40,7 @@ Add the following dependencies to your project: [source,xml] .Example dependencies using Maven --- +---- @@ -54,14 +54,14 @@ Add the following dependencies to your project: quarkus-jdbc-postgresql --- +---- Annotate your persistent objects with `@Entity`, then add the relevant configuration properties in `{config-file}`. [source,properties] .Example `{config-file}` --- +---- # datasource configuration quarkus.datasource.db-kind = postgresql quarkus.datasource.username = hibernate @@ -70,12 +70,12 @@ quarkus.datasource.jdbc.url = jdbc:postgresql://localhost:5432/hibernate_db # drop and create the database at startup (use `update` to only update the schema) quarkus.hibernate-orm.database.generation=drop-and-create --- +---- -Note that these configuration properties are not the same ones as in your typical Hibernate ORM configuration file: these drive Quarkus configuration properties, -which often will map to Hibernate configuration properties but could have different names and don't necessarily map 1:1 to each other. +Note that these configuration properties are not the same ones as in your typical Hibernate ORM configuration file. +They will often map to Hibernate ORM configuration properties but could have different names and don't necessarily map 1:1 to each other. -Also, Quarkus will set many Hibernate configuration settings automatically, and will often use more modern defaults. +Also, Quarkus will set many Hibernate ORM configuration settings automatically, and will often use more modern defaults. Please see below section <> for the list of properties you can set in `{config-file}`. @@ -87,7 +87,7 @@ You can then happily inject your `EntityManager`: [source,java] .Example application bean using Hibernate --- +---- @ApplicationScoped public class SantaClausService { @Inject @@ -100,20 +100,22 @@ public class SantaClausService { em.persist(gift); } } --- +---- <1> Inject your entity manager and have fun <2> Mark your CDI bean method as `@Transactional` and the `EntityManager` will enlist and flush at commit. [source,java] .Example Entity --- +---- @Entity public class Gift { private Long id; private String name; - @Id @GeneratedValue(strategy = GenerationType.SEQUENCE, generator="giftSeq") + @Id + @SequenceGenerator(name = "giftSeq", sequenceName = "gift_id_seq", allocationSize = 1, initialValue = 1) + @GeneratedValue(generator = "giftSeq") public Long getId() { return id; } @@ -130,10 +132,12 @@ public class Gift { this.name = name; } } --- +---- + +To load SQL statements when Hibernate ORM starts, add an `import.sql` file to the root of your resources directory. +This script can contain any SQL DML statements. +Make sure to terminate each statement with a semicolon. -To load some SQL statements when Hibernate ORM starts, add an `import.sql` in the root of your resources directory. -Such a script can contain any SQL DML statements; make sure to terminate each statement with a semicolon. This is useful to have a data set ready for your tests or demos. WARNING: Make sure to wrap methods modifying your database (e.g. `entity.persist()`) within a transaction. Marking a @@ -165,20 +169,187 @@ Make up your mind on which approach you want to use. ==== Want to start a PostgreSQL server on the side with Docker? 
-[source,shell] --- -docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 \ - --name postgres-quarkus-hibernate -e POSTGRES_USER=hibernate \ +[source,bash] +---- +docker run --rm=true --name postgres-quarkus-hibernate -e POSTGRES_USER=hibernate \ -e POSTGRES_PASSWORD=hibernate -e POSTGRES_DB=hibernate_db \ - -p 5432:5432 postgres:10.5 --- + -p 5432:5432 postgres:13.1 +---- This will start a non-durable empty database: ideal for a quick experiment! ==== +[[multiple-persistence-units]] +=== Multiple persistence units + +==== Setting up multiple persistence units + +It is possible to define multiple persistence units using the Quarkus configuration properties. + +The properties at the root of the `quarkus.hibernate-orm.` namespace define the default persistence unit. +For instance, the following snippet defines a default datasource and a default persistence unit: + +[source,properties] +---- +quarkus.datasource.db-kind=h2 +quarkus.datasource.jdbc.url=jdbc:h2:mem:default;DB_CLOSE_DELAY=-1 + +quarkus.hibernate-orm.dialect=org.hibernate.dialect.H2Dialect +quarkus.hibernate-orm.database.generation=drop-and-create +---- + +Using a map based approach, it is possible to define named persistence units: + +[source,properties] +---- +quarkus.datasource."users".db-kind=h2 <1> +quarkus.datasource."users".jdbc.url=jdbc:h2:mem:users;DB_CLOSE_DELAY=-1 + +quarkus.datasource."inventory".db-kind=h2 <2> +quarkus.datasource."inventory".jdbc.url=jdbc:h2:mem:inventory;DB_CLOSE_DELAY=-1 + +quarkus.hibernate-orm."users".database.generation=drop-and-create <3> +quarkus.hibernate-orm."users".datasource=users <4> +quarkus.hibernate-orm."users".packages=org.acme.model.user <5> + +quarkus.hibernate-orm."inventory".database.generation=drop-and-create <6> +quarkus.hibernate-orm."inventory".datasource=inventory +quarkus.hibernate-orm."inventory".packages=org.acme.model.inventory +---- +<1> Define a datasource named `users`. +<2> Define a datasource named `inventory`. +<3> Define a persistence unit called `users`. +<4> Define the datasource used by the persistence unit. +<5> This configuration property is important but we will discuss it a bit later. +<6> Define a persistence unit called `inventory` pointing to the `inventory` datasource. + +[NOTE] +==== +You can mix the default datasource and named datasources or only have one or the other. +==== + +[NOTE] +==== +The default persistence unit points to the default datasource by default. +For named persistence units, the `datasource` property is mandatory. +You can point your persistence unit to the default datasource by setting it to `` +(which is the internal name of the default datasource). + +It is perfectly valid to have several persistence units pointing to the same datasource. +==== + +[[multiple-persistence-units-attaching-model-classes]] +==== Attaching model classes to persistence units + +There are two ways to attach model classes to persistence units, and they should not be mixed: + +* Via the `packages` configuration property; +* Via the `@io.quarkus.hibernate.orm.PersistenceUnit` package-level annotation. + +If both are mixed, the annotations are ignored and only the `packages` configuration properties are taken into account. 
+ +Using the `packages` configuration property is simple: + +[source,properties] +---- +quarkus.hibernate-orm.database.generation=drop-and-create +quarkus.hibernate-orm.packages=org.acme.model.defaultpu + +quarkus.hibernate-orm."users".database.generation=drop-and-create +quarkus.hibernate-orm."users".datasource=users +quarkus.hibernate-orm."users".packages=org.acme.model.user +---- + +This configuration snippet will create two persistence units: + +* The default one which will contain all the model classes under the `org.acme.model.defaultpu` package, subpackages included. +* A named `users` persistence unit which will contain all the model classes under the `org.acme.model.user` package, subpackages included. + +You can attach several packages to a persistence unit: + +[source,properties] +---- +quarkus.hibernate-orm."users".packages=org.acme.model.shared,org.acme.model.user +---- + +All the model classes under the `org.acme.model.shared` and `org.acme.model.user` packages will be attached to the `users` persistence unit. + +It is also supported to attach a given model class to several persistence units. + +[NOTE] +==== +Model classes need to be consistently added to a given persistence unit. +That meant that all dependent model classes of a given entity (mapped super classes, embeddables...) are required to be attached to the persistence unit. +As we are dealing with the persistence unit at the package level, it should be simple enough. +==== + +[WARNING] +==== +Panache entities can be attached to only one persistence unit. + +For entities attached to several persistence units, you cannot use Panache. +You can mix the two approaches though and mix Panache entities and traditional entities where multiple persistence units are required. + +If you have a use case for that and clever ideas about how to implement it without cluttering the simplified Panache approach, +contact us on the link:{quarkus-mailing-list-index}[quarkus-dev mailing list]. +==== + +The second approach to attach model classes to a persistence unit is to use package-level `@io.quarkus.hibernate.orm.PersistenceUnit` annotations. +Again, the two approaches cannot be mixed. + +To obtain a configuration similar to the one above with the `packages` configuration property, create a `package-info.java` file with the following content: + +[source,java] +---- +@PersistenceUnit("users") <1> +package org.acme.model.user; + +import io.quarkus.hibernate.orm.PersistenceUnit; +---- +<1> Be careful, use the `@io.quarkus.hibernate.orm.PersistenceUnit` annotation, not the JPA one. + +[CAUTION] +==== +We only support defining the `@PersistenceUnit` for model classes at the package level, +using the `@PersistenceUnit` annotation at the class level is not supported in this case. +==== + +Note that, similarly to what we do with the configuration property, we take into account the annotated package but also all its subpackages. + +==== CDI integration + +If you are familiar with using Hibernate ORM in Quarkus, you probably already have injected the `EntityManager` using CDI: + +[source,java] +---- +@Inject +EntityManager entityManager; +---- + +This will inject the `EntityManager` of the default persistence unit. + +Injecting the `EntityManager` of a named persistence unit (`users` in our example) is as simple as: + +[source,java] +---- +@Inject +@PersistenceUnit("users") <1> +EntityManager entityManager; +---- +<1> Here again, we use the same `@io.quarkus.hibernate.orm.PersistenceUnit` annotation. 
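Put together, a bean working exclusively against the `users` persistence unit could look like the following sketch; the `User` entity and the service class are assumptions made for this example.

[source,java]
----
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.transaction.Transactional;

import io.quarkus.hibernate.orm.PersistenceUnit;

@ApplicationScoped
public class UserService {

    @Inject
    @PersistenceUnit("users") // the Quarkus annotation, not the JPA one
    EntityManager entityManager;

    @Transactional
    public void create(User user) {
        // Persisted through the "users" persistence unit, backed by the "users" datasource
        entityManager.persist(user);
    }
}
----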
+ +You can inject the `EntityManagerFactory` of a named persistence unit using the exact same mechanism: + +[source,java] +---- +@Inject +@PersistenceUnit("users") +EntityManagerFactory entityManagerFactory; +---- + == Setting up and configuring Hibernate ORM with a `persistence.xml` -Alternatively, you can use a `META-INF/persistence.xml` to setup Hibernate ORM. +Alternatively, you can use a `META-INF/persistence.xml` to set up Hibernate ORM. This is useful for: * migrating existing code @@ -186,17 +357,17 @@ This is useful for: * or if you like it the good old way [NOTE] --- +==== If you have a `persistence.xml`, then you cannot use the `quarkus.hibernate-orm.*` properties and only persistence units defined in `persistence.xml` will be taken into account. --- +==== Your `pom.xml` dependencies as well as your Java code would be identical to the precedent example. The only difference is that you would specify your Hibernate ORM configuration in `META-INF/persistence.xml`: [source,xml] .Example persistence.xml resource --- +---- io.quarkus quarkus-hibernate-envers --- +---- At this point the extension does not expose additional configuration properties. @@ -477,9 +650,10 @@ For more information about Hibernate Envers, see link:https://hibernate.org/orm/ [[metrics]] == Metrics -The SmallRye Metrics extension is capable of exposing metrics that Hibernate ORM collects at runtime. To enable exposure of Hibernate metrics -on the `/metrics` endpoint, make sure your project depends on the `quarkus-smallrye-metrics` artifact and set the configuration property `quarkus.hibernate-orm.metrics.enabled` to `true`. -Metrics will then be available under the `vendor` scope. +Either link:micrometer[Micrometer] or link:microprofile-metrics[SmallRye Metrics] are +capable of exposing metrics that Hibernate ORM collects at runtime. To enable exposure of Hibernate metrics +on the `/q/metrics` endpoint, make sure your project depends on a metrics extension and set the configuration property `quarkus.hibernate-orm.metrics.enabled` to `true`. +When using link:microprofile-metrics[SmallRye Metrics], metrics will be available under the `vendor` scope. == Limitations and other things you should know @@ -498,6 +672,13 @@ applications perform better. Typically you would need to adapt your build scripts to include the Hibernate Enhancement plugins; in Quarkus this is not necessary as the enhancement step is integrated in the build and analysis of the Quarkus application. +[WARNING] +==== +Due to the usage of enhancement, using the `clone()` method on entities is currently not supported +as it will also clone some enhancement-specific fields that are specific to the entity. + +This limitation might be removed in the future. +==== === Automatic integration @@ -542,10 +723,6 @@ JPA Callbacks:: Annotations allowing for application callbacks on entity lifecycle events defined by JPA such as `@javax.persistence.PostUpdate`, `@javax.persistence.PostLoad`, `@javax.persistence.PostPersist`, etc... are currently not processed. This limitation could be resolved in a future version, depending on user demand. -Single instance:: -It is currently not possible to configure more than one instance of Hibernate ORM. -This is a temporary limitation, the team is working on it - please be patient! - === Other notable differences Format of `import.sql`:: @@ -564,6 +741,7 @@ it's implemented by the Agroal connection pool extension for Quarkus. Jump over to link:datasource[Quarkus - Datasources] for all details. 
+[[multitenancy]] == Multitenancy "The term multitenancy, in general, is applied to software development to indicate an architecture in which a single running instance of an application simultaneously serves multiple clients (tenants). This is highly common in SaaS solutions. Isolating information (data, customizations, etc.) pertaining to the various tenants is a particular challenge in these systems. This includes the data owned by each tenant stored in the database" (link:https://docs.jboss.org/hibernate/orm/5.4/userguide/html_single/Hibernate_User_Guide.html#multitenacy[Hibernate User Guide]). @@ -579,14 +757,10 @@ Let's start by implementing the `/{tenant}` endpoint. As you can see from the so import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import javax.persistence.EntityManager; -import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; @ApplicationScoped -@Produces("application/json") -@Consumes("application/json") @Path("/{tenant}") public class FruitResource { @@ -609,13 +783,10 @@ In order to resolve the tenant from incoming requests and map it to a specific t ---- import javax.enterprise.context.ApplicationScoped; -import io.quarkus.arc.Arc; -import io.quarkus.arc.Unremovable; import io.quarkus.hibernate.orm.runtime.tenant.TenantResolver; import io.vertx.ext.web.RoutingContext; -@RequestScoped -@Unremovable +@RequestScoped <1> public class CustomTenantResolver implements TenantResolver { @Inject @@ -641,14 +812,62 @@ public class CustomTenantResolver implements TenantResolver { } ---- +<1> The bean is made `@RequestScoped` as the tenant resolution depends on the incoming request. From the implementation above, tenants are resolved from the request path so that in case no tenant could be inferred, the default tenant identifier is returned. +[NOTE] +==== +If <> are used, you need to qualify the `TenantResolver` with the persistence unit name: + +[source,java] +---- +import javax.enterprise.context.ApplicationScoped; + +import io.quarkus.hibernate.orm.PersistenceUnit; +import io.quarkus.hibernate.orm.runtime.tenant.TenantResolver; +import io.vertx.ext.web.RoutingContext; + +@PersistenceUnit("name-of-persistence-unit") <1> +@RequestScoped +public class CustomTenantResolver implements TenantResolver { + + // ... + +} +---- +<1> Make sure to use the `@io.quarkus.hibernate.orm.PersistenceUnit` annotation, not the JPA one. +==== + +[NOTE] +==== +If you also use link:security-openid-connect-multitenancy[OIDC multitenancy] and both OIDC and Hibernate ORM tenant IDs are the same and must be extracted from the Vert.x `RoutingContext` then you can pass the tenant id from the OIDC Tenant Resolver to the Hibernate ORM Tenant Resolver as a `RoutingContext` attribute, for example: + +[source,java] +---- +import io.quarkus.hibernate.orm.runtime.tenant.TenantResolver; +import io.vertx.ext.web.RoutingContext; + +@RequestScoped +public class CustomTenantResolver implements TenantResolver { + + @Inject + RoutingContext context; + ... + @Override + public String resolveTenantId() { + // OIDC TenantResolver has already calculated the tenant id and saved it as a RoutingContext `tenantId` attribute: + return context.get("tenantId"); + } +} +---- +==== + === Configuring the application In general it is not possible to use the Hibernate ORM database generation feature in conjunction with a multitenancy setup. Therefore you have to disable it and you need to make sure that the tables are created per schema. 
-The following setup will use the link:https://quarkus.io/guides/flyway[Flyway] extension to achieve this goal. +The following setup will use the link:flyway[Flyway] extension to achieve this goal. ==== SCHEMA approach diff --git a/docs/src/main/asciidoc/hibernate-search-elasticsearch.adoc b/docs/src/main/asciidoc/hibernate-search-elasticsearch.adoc deleted file mode 100644 index 117b4854b954f..0000000000000 --- a/docs/src/main/asciidoc/hibernate-search-elasticsearch.adoc +++ /dev/null @@ -1,700 +0,0 @@ -//// -This guide is maintained in the main Quarkus repository -and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc -//// -= Quarkus - Hibernate Search guide -:extension-status: preview -include::./attributes.adoc[] - -You have a Hibernate ORM-based application? You want to provide a full-featured full-text search to your users? You're at the right place. - -With this guide, you'll learn how to synchronize your entities to an Elasticsearch cluster in a heart beat with Hibernate Search. -We will also explore how you can can query your Elasticsearch cluster using the Hibernate Search API. - -include::./status-include.adoc[] - -[WARNING] -==== -This extension is based on a beta version of Hibernate Search. -While APIs are quite stable and the code is of production quality and thoroughly tested, some features are still missing, performance might not be optimal and some APIs or configuration properties might change as the extension matures. -==== - -== Prerequisites - -To complete this guide, you need: - -* less than 20 minutes -* an IDE -* JDK 1.8+ installed with `JAVA_HOME` configured appropriately -* Apache Maven {maven-version} -* Docker -* link:building-native-image[GraalVM installed if you want to run in native mode] - -== Architecture - -The application described in this guide allows to manage a (simple) library: you manage authors and their books. - -The entities are stored in a PostgreSQL database and indexed in an Elasticsearch cluster. - -== Solution - -We recommend that you follow the instructions in the next sections and create the application step by step. -However, you can go right to the completed example. - -Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. - -The solution is located in the `hibernate-search-elasticsearch-quickstart` {quickstarts-tree-url}/hibernate-search-elasticsearch-quickstart[directory]. - -[NOTE] -==== -The provided solution contains a few additional elements such as tests and testing infrastructure. -==== - -== Creating the Maven project - -First, we need a new project. Create a new project with the following command: - -[source,shell,subs=attributes+] ----- -mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ - -DprojectGroupId=org.acme \ - -DprojectArtifactId=hibernate-search-elasticsearch-quickstart \ - -DclassName="org.acme.hibernate.search.elasticsearch.LibraryResource" \ - -Dpath="/library" \ - -Dextensions="hibernate-orm-panache, hibernate-search-elasticsearch, resteasy-jsonb, jdbc-postgresql" -cd hibernate-search-elasticsearch-quickstart ----- - -This command generates a Maven structure importing the following extensions: - - * Hibernate ORM with Panache, - * the PostgreSQL JDBC driver, - * Hibernate Search + Elasticsearch, - * RESTEasy and JSON-B. 
- -If you already have your Quarkus project configured, you can add the `hibernate-search-elasticsearch` extension -to your project by running the following command in your project base directory: - -[source,bash] ----- -./mvnw quarkus:add-extension -Dextensions="hibernate-search-elasticsearch" ----- - -This will add the following to your `pom.xml`: - -[source,xml] ----- - - io.quarkus - quarkus-hibernate-search-elasticsearch - ----- - -[IMPORTANT] -==== -For now, let's delete the two generated tests `LibraryResourceTest` and `NativeLibraryResourceIT` present in `src/test/java`. -If you are interested in how you can test this application, just refer to the solution in the quickstarts Git repository: -it contains a lot of tests and the required testing infrastructure. -==== - -== Creating the bare entities - -First, let's create our Hibernate ORM entities `Book` and `Author` in the `model` subpackage. - -[source,java] ----- -package org.acme.hibernate.search.elasticsearch.model; - -import java.util.List; -import java.util.Objects; - -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.OneToMany; - -import io.quarkus.hibernate.orm.panache.PanacheEntity; - -@Entity -public class Author extends PanacheEntity { // <1> - - public String firstName; - - public String lastName; - - @OneToMany(mappedBy = "author", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) // <2> - public List books; - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof Author)) { - return false; - } - - Author other = (Author) o; - - return Objects.equals(id, other.id); - } - - @Override - public int hashCode() { - return 31; - } -} ----- -<1> We are using Hibernate ORM with Panache, it is not mandatory. -<2> We are loading these elements eagerly so that they are present in the JSON output. -In a real world application, you should probably use a DTO approach. - -[source,java] ----- -package org.acme.hibernate.search.elasticsearch.model; - -import java.util.Objects; - -import javax.json.bind.annotation.JsonbTransient; -import javax.persistence.Entity; -import javax.persistence.ManyToOne; - -import io.quarkus.hibernate.orm.panache.PanacheEntity; - -@Entity -public class Book extends PanacheEntity { - - public String title; - - @ManyToOne - @JsonbTransient // <1> - public Author author; - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof Book)) { - return false; - } - - Book other = (Book) o; - - return Objects.equals(id, other.id); - } - - @Override - public int hashCode() { - return 31; - } -} ----- -<1> We mark this property with `@JsonbTransient` to avoid infinite loops when serializing with JSON-B. - -== Initializing the REST service - -While everything is not yet set up for our REST service, we can initialize it with the standard CRUD operations we will need. 
- -Just copy this content in the `LibraryResource` file created by the Maven `create-project` command: - -[source,java] ----- -package org.acme.hibernate.search.elasticsearch; - -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.transaction.Transactional; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; - -import org.acme.hibernate.search.elasticsearch.model.Author; -import org.acme.hibernate.search.elasticsearch.model.Book; -import org.jboss.resteasy.annotations.jaxrs.FormParam; -import org.jboss.resteasy.annotations.jaxrs.PathParam; - -@Path("/library") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -public class LibraryResource { - - @Inject - EntityManager em; - - @PUT - @Path("book") - @Transactional - @Consumes(MediaType.APPLICATION_FORM_URLENCODED) - public void addBook(@FormParam String title, @FormParam Long authorId) { - Author author = Author.findById(authorId); - if (author == null) { - return; - } - - Book book = new Book(); - book.title = title; - book.author = author; - book.persist(); - - author.books.add(book); - author.persist(); - } - - @DELETE - @Path("book/{id}") - @Transactional - public void deleteBook(@PathParam Long id) { - Book book = Book.findById(id); - if (book != null) { - book.author.books.remove(book); - book.delete(); - } - } - - @PUT - @Path("author") - @Transactional - @Consumes(MediaType.APPLICATION_FORM_URLENCODED) - public void addAuthor(@FormParam String firstName, @FormParam String lastName) { - Author author = new Author(); - author.firstName = firstName; - author.lastName = lastName; - author.persist(); - } - - @POST - @Path("author/{id}") - @Transactional - @Consumes(MediaType.APPLICATION_FORM_URLENCODED) - public void updateAuthor(@PathParam Long id, @FormParam String firstName, @FormParam String lastName) { - Author author = Author.findById(id); - if (author == null) { - return; - } - author.firstName = firstName; - author.lastName = lastName; - author.persist(); - } - - @DELETE - @Path("author/{id}") - @Transactional - public void deleteAuthor(@PathParam Long id) { - Author author = Author.findById(id); - if (author != null) { - author.delete(); - } - } -} ----- - -Nothing out of the ordinary here: it is just good old Hibernate ORM with Panache operations in a standard JAX-RS service. - -In fact, the interesting part is that we will need to add very few elements to make our full text search application working. - -== Using Hibernate Search annotations - -Let's go back to our entities. - -Enabling full text search capabilities for them is as simple as adding a few annotations. 
- -Let's edit the `Author` entity again to include this content: - -[source,java] ----- -package org.acme.hibernate.search.elasticsearch.model; - -import java.util.Objects; - -import javax.json.bind.annotation.JsonbTransient; -import javax.persistence.Entity; -import javax.persistence.ManyToOne; - -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; - -import io.quarkus.hibernate.orm.panache.PanacheEntity; - -@Entity -@Indexed // <1> -public class Book extends PanacheEntity { - - @FullTextField(analyzer = "english") // <2> - public String title; - - @ManyToOne - @JsonbTransient - public Author author; - - // Preexisting equals()/hashCode() methods -} ----- -<1> First, let's use the `@Indexed` annotation to register our `Book` entity as part of the full text index. -<2> The `@FullTextField` annotation declares a field in the index specifically tailored for full text search. -In particular, we have to define an analyzer to split and analyze the tokens (~ words) - more on this later. - -Now that our books are indexed, we can do the same for the authors. - -Open the `Author` class and include the content below. - -Things are quite similar here: we use the `@Indexed`, `@FullTextField` and `@KeywordField` annotations. - -There are a few differences/additions though. Let's check them out. - -[source,java] ----- -package org.acme.hibernate.search.elasticsearch.model; - -import java.util.List; -import java.util.Objects; - -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.OneToMany; - -import org.hibernate.search.engine.backend.types.Sortable; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.KeywordField; - -import io.quarkus.hibernate.orm.panache.PanacheEntity; - -@Entity -@Indexed -public class Author extends PanacheEntity { - - @FullTextField(analyzer = "name") // <1> - @KeywordField(name = "firstName_sort", sortable = Sortable.YES, normalizer = "sort") // <2> - public String firstName; - - @FullTextField(analyzer = "name") - @KeywordField(name = "lastName_sort", sortable = Sortable.YES, normalizer = "sort") - public String lastName; - - @OneToMany(mappedBy = "author", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) - @IndexedEmbedded // <3> - public List books; - - // Preexisting equals()/hashCode() methods -} ----- -<1> We use a `@FullTextField` similar to what we did for `Book` but you'll notice that the analyzer is different - more on this later. -<2> As you can see, we can define several fields for the same property. -Here, we define a `@KeywordField` with a specific name. -The main difference is that a keyword field is not tokenized (the string is kept as one single token) but can be normalized (i.e. filtered) - more on this later. -This field is marked as sortable as our intention is to use it for sorting our authors. -<3> The purpose of `@IndexedEmbedded` is to include the `Book` fields into the `Author` index. -In this case, we just use the default configuration: all the fields of the associated `Book` entities are included in the index (i.e. the `title` field). 
-The nice thing with `@IndexedEmbedded` is that it is able to automatically reindex an `Author` if one of its ``Book``s has been updated thanks to the bidirectional relation. -`@IndexedEmbedded` also supports nested documents (using the `storage = NESTED` attribute) but we don't need it here. -You can also specify the fields you want to include in your parent index using the `includePaths` attribute if you don't want them all. - -== Analyzers and normalizers - -=== Introduction - -Analysis is a big part of full text search: it defines how text will be processed when indexing or building search queries. - -The role of analyzers is to split the text into tokens (~ words) and filter them (making it all lowercase and removing accents for instance). - -Normalizers are a special type of analyzers that keeps the input as a single token. -It is especially useful for sorting or indexing keywords. - -There are a lot of bundled analyzers but you can also develop your own for your own specific purposes. - -You can learn more about the Elasticsearch analysis framework in the https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis.html[Analysis section of the Elasticsearch documentation]. - -=== Defining the analyzers used - -When we added the Hibernate Search annotations to our entities, we defined the analyzers and normalizers used. -Typically: - -[source,java] ----- -@FullTextField(analyzer = "english") ----- - -[source,java] ----- -@FullTextField(analyzer = "name") ----- - -[source,java] ----- -@KeywordField(name = "lastName_sort", sortable = Sortable.YES, normalizer = "sort") ----- - -We use: - - * an analyzer called `name` for person names, - * an analyzer called `english` for book titles, - * a normalizer called `sort` for our sort fields - -but we haven't set them up yet. - -Let's see how you can do it with Hibernate Search. - -=== Setting up the analyzers - -It is an easy task, we just need to create an implementation of `ElasticsearchAnalysisConfigurer` -(and configure Quarkus to use it, more on that later). - -To fulfill our requirements, let's create the following implementation: - -[source,java] ----- -package org.acme.hibernate.search.elasticsearch.config; - -import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurationContext; -import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurer; - -public class AnalysisConfigurer implements ElasticsearchAnalysisConfigurer { - - @Override - public void configure(ElasticsearchAnalysisConfigurationContext context) { - context.analyzer("name").custom() // <1> - .tokenizer("standard") - .tokenFilters("asciifolding", "lowercase"); - - context.analyzer("english").custom() // <2> - .tokenizer("standard") - .tokenFilters("asciifolding", "lowercase", "porter_stem"); - - context.normalizer("sort").custom() // <3> - .tokenFilters("asciifolding", "lowercase"); - } -} ----- -<1> This is a simple analyzer separating the words on spaces, removing any non-ASCII characters by its ASCII counterpart (and thus removing accents) and putting everything in lowercase. -It is used in our examples for the author's names. -<2> We are a bit more aggressive with this one and we include some stemming: we will be able to search for `mystery` and get a result even if the indexed input contains `mysteries`. -It is definitely too aggressive for person names but it is perfect for the book titles. -<3> Here is the normalizer used for sorting. 
Very similar to our first analyzer, except we don't tokenize the words as we want one and only one token. - -== Adding full text capabilities to our REST service - -In our existing `LibraryResource`, we just need to inject the following methods (and add a few ``import``s): - -[source,java] ----- - @Transactional // <1> - void onStart(@Observes StartupEvent ev) throws InterruptedException { // <2> - // only reindex if we imported some content - if (Book.count() > 0) { - Search.session(em) - .massIndexer() - .startAndWait(); - } - } - - @GET - @Path("author/search") // <3> - @Transactional - public List searchAuthors(@QueryParam String pattern, // <4> - @QueryParam Optional size) { - return Search.session(em) // <5> - .search(Author.class) // <6> - .where(f -> - pattern == null || pattern.trim().isEmpty() ? - f.matchAll() : // <7> - f.simpleQueryString() - .fields("firstName", "lastName", "books.title").matching(pattern) // <8> - ) - .sort(f -> f.field("lastName_sort").then().field("firstName_sort")) // <9> - .fetchHits(size.orElse(20)); // <10> - } ----- -<1> Important point: we need a transactional context for these methods. -<2> As we will import data into the PostgreSQL database using an SQL script, we need to reindex the data at startup. -For this, we use Hibernate Search's mass indexer, which allows to index a lot of data efficiently (you can fine tune it for better performances). -All the upcoming updates coming through Hibernate ORM operations will be synchronized automatically to the full text index. -If you don't import data manually in the database, you don't need that: -the mass indexer should then only be used when you change your indexing configuration (adding a new field, changing an analyzer's configuration...) and you want the new configuration to be applied to your existing entities. -<3> This is where the magic begins: just adding the annotations to our entities makes them available for full text search: we can now query the index using the Hibernate Search DSL. -<4> Use the `org.jboss.resteasy.annotations.jaxrs.QueryParam` annotation type to avoid repeating the parameter name. -<5> First, we get an Hibernate Search session from the injected entity manager. -<6> We indicate that we are searching for ``Author``s. -<7> We create a predicate: if the pattern is empty, we use a `matchAll()` predicate. -<8> If we have a valid pattern, we create a https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html[`simpleQueryString()`] predicate on the `firstName`, `lastName` and `books.title` fields matching our pattern. -<9> We define the sort order of our results. Here we sort by last name, then by first name. Note that we use the specific fields we created for sorting. -<10> Fetch the `size` top hits, `20` by default. Obviously, paging is also supported. - -[NOTE] -==== -The Hibernate Search DSL supports a significant subset of the Elasticsearch predicates (match, range, nested, phrase, spatial...). -Feel free to explore the DSL using autocompletion. - -When that's not enough, you can always fall back to -link:https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#search-dsl-predicate-extensions-elasticsearch-from-json[defining a predicate using JSON directly]. -==== - -== Configuring the application - -As usual, we can configure everything in the Quarkus configuration file, `application.properties`. 
- -Edit `src/main/resources/application.properties` and inject the following configuration: - -[source,properties] ----- -quarkus.ssl.native=false <1> - -quarkus.datasource.db-kind=postgresql <2> -quarkus.datasource.username=quarkus_test -quarkus.datasource.password=quarkus_test -quarkus.datasource.jdbc.url=jdbc:postgresql:quarkus_test - -quarkus.hibernate-orm.database.generation=drop-and-create <3> -quarkus.hibernate-orm.sql-load-script=import.sql <4> - -quarkus.hibernate-search.elasticsearch.version=7 <5> -quarkus.hibernate-search.elasticsearch.analysis.configurer=org.acme.hibernate.search.elasticsearch.config.AnalysisConfigurer <6> -quarkus.hibernate-search.schema-management.strategy=drop-and-create <7> -quarkus.hibernate-search.elasticsearch.index-defaults.schema-management.required-status=yellow <8> -quarkus.hibernate-search.automatic-indexing.synchronization.strategy=sync <9> ----- -<1> We won't use SSL so we disable it to have a more compact native executable. -<2> Let's create a PostgreSQL datasource. -<3> We will drop and recreate the schema every time we start the application. -<4> We load some initial data. -<5> We need to tell Hibernate Search about the version of Elasticsearch we will use. -It is important because there are significant differences between Elasticsearch mapping syntax depending on the version. -Since the mapping is created at build time to reduce startup time, Hibernate Search cannot connect to the cluster to automatically detect the version. -<6> We point to the custom `AnalysisConfigurer` which defines the configuration of our analyzers and normalizers. -<7> Obviously, this is not for production: we drop and recreate the index every time we start the application. -<8> We consider the `yellow` status is sufficient to proceed after an index is created. -This is for testing purposes with the Elasticsearch Docker container. -It should not be used in production. -<9> This means that we wait for the entities to be searchable before considering a write complete. -On a production setup, the `write-sync` default will provide better performance. -Using `sync` is especially important when testing as you need the entities to be searchable immediately. - -[TIP] -For more information about the Hibernate Search extension configuration please refer to the <>. - -== Creating a frontend - -Now let's add a simple web page to interact with our `LibraryResource`. -Quarkus automatically serves static resources located under the `META-INF/resources` directory. -In the `src/main/resources/META-INF/resources` directory, overwrite the existing `index.html` file with the content from this -{quickstarts-blob-url}/hibernate-search-elasticsearch-quickstart/src/main/resources/META-INF/resources/index.html[index.html] file. - -== Automatic import script - -For the purpose of this demonstration, let's import an initial dataset. 
- -Let's create a `src/main/resources/import.sql` file with the following content: - -[source,sql] ----- -INSERT INTO author(id, firstname, lastname) VALUES (nextval('hibernate_sequence'), 'John', 'Irving'); -INSERT INTO author(id, firstname, lastname) VALUES (nextval('hibernate_sequence'), 'Paul', 'Auster'); - -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The World According to Garp', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The Hotel New Hampshire', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The Cider House Rules', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'A Prayer for Owen Meany', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Last Night in Twisted River', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'In One Person', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Avenue of Mysteries', 1); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The New York Trilogy', 2); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Mr. Vertigo', 2); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The Brooklyn Follies', 2); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Invisible', 2); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Sunset Park', 2); -INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), '4 3 2 1', 2); ----- - -== Preparing the infrastructure - -We need a PostgreSQL instance and an Elasticsearch cluster. - -Let's use Docker to start one of each: - -[source, shell,subs=attributes+] ----- -docker run -it --rm=true --name elasticsearch_quarkus_test -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch-oss:{elasticsearch-version} ----- - -[source, shell] ----- -docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name postgresql_quarkus_test -e POSTGRES_USER=quarkus_test -e POSTGRES_PASSWORD=quarkus_test -e POSTGRES_DB=quarkus_test -p 5432:5432 postgres:11.3 ----- - -== Time to play with your application - -You can now interact with your REST service: - - * start Quarkus with `./mvnw compile quarkus:dev` - * open a browser to `http://localhost:8080/` - * search for authors or book titles (we initialized some data for you) - * create new authors and books and search for them too - -As you can see, all your updates are automatically synchronized to the Elasticsearch cluster. - -== Building a native executable - -You can build a native executable with the usual command `./mvnw package -Pnative`. - -[NOTE] -==== -As usual with native executable compilation, this operation consumes a lot of memory. - -It might be safer to stop the two containers while you are building the native executable and start them again once you are done. -==== - -Running it is as simple as executing `./target/hibernate-search-elasticsearch-quickstart-1.0-SNAPSHOT-runner`. - -You can then point your browser to `http://localhost:8080/` and use your application. - -[NOTE] -==== -The startup is a bit slower than usual: it is mostly due to us dropping and recreating the database schema and the Elasticsearch mapping every time at startup. -We also inject some data and execute the mass indexer. 
-
-In a real life application, it is obviously something you won't do at startup.
-====
-
-== Further reading
-
-If you are interested into learning more about Hibernate Search 6, the Hibernate team has https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/[some documentation] in the works.
-
-It is still work in progress but covers all main concepts and features,
-so it can guide you in your exploration.
-
-== FAQ
-
-=== Why Hibernate Search 6 (and not a fully supported version)?
-
-To optimize the Hibernate Search bootstrap for Quarkus, the Hibernate team had to reorganize things a bit (collect the metadata offline, start the Elasticsearch client later...).
-
-This couldn't be done in the 5.x code base so we decided to go with the in-progress Hibernate Search 6.
-
-=== Can I really use it?
-
-While Hibernate Search 6 is still at Beta stage, the code is of production quality and can be relied on.
-
-What we don't guarantee is that there might be API changes along the way to the final release of Hibernate Search 6 and you might have to adapt your code.
-
-If it is not a major issue for you, then sure you can use it.
-
-=== Why Elasticsearch only?
-
-Hibernate Search supports both a Lucene backend and an Elasticsearch backend.
-
-In the context of Quarkus and to build microservices, we thought the latter would make more sense.
-Thus we focused our efforts on it.
-
-We don't have plans to support the Lucene backend in Quarkus for now.
-
-[[configuration-reference]]
-== Hibernate Search Configuration Reference
-
-include::{generated-dir}/config/quarkus-hibernate-search-elasticsearch.adoc[leveloffset=+1, opts=optional]
diff --git a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc
new file mode 100644
index 0000000000000..e0b09a227b57b
--- /dev/null
+++ b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc
@@ -0,0 +1,807 @@
+////
+This guide is maintained in the main Quarkus repository
+and pull requests should be submitted there:
+https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
+////
+= Quarkus - Hibernate Search guide
+:hibernate-search-doc-prefix: https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/
+include::./attributes.adoc[]
+
+You have a Hibernate ORM-based application? You want to provide a full-featured full-text search to your users? You're at the right place.
+
+With this guide, you'll learn how to synchronize your entities to an Elasticsearch cluster in a heartbeat with Hibernate Search.
+We will also explore how you can query your Elasticsearch cluster using the Hibernate Search API.
+
+== Prerequisites
+
+To complete this guide, you need:
+
+* less than 20 minutes
+* an IDE
+* JDK 1.8+ installed with `JAVA_HOME` configured appropriately
+* Apache Maven {maven-version}
+* Docker
+* link:building-native-image[GraalVM installed if you want to run in native mode]
+
+== Architecture
+
+The application described in this guide lets you manage a (simple) library: you manage authors and their books.
+
+The entities are stored in a PostgreSQL database and indexed in an Elasticsearch cluster.
+
+== Solution
+
+We recommend that you follow the instructions in the next sections and create the application step by step.
+However, you can go right to the completed example.
+
+Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive].
+
+The solution is located in the `hibernate-search-orm-elasticsearch-quickstart` {quickstarts-tree-url}/hibernate-search-orm-elasticsearch-quickstart[directory].
+
+[NOTE]
+====
+The provided solution contains a few additional elements such as tests and testing infrastructure.
+====
+
+== Creating the Maven project
+
+First, we need a new project. Create a new project with the following command:
+
+[source,bash,subs=attributes+]
+----
+mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \
+    -DprojectGroupId=org.acme \
+    -DprojectArtifactId=hibernate-search-orm-elasticsearch-quickstart \
+    -DclassName="org.acme.hibernate.search.elasticsearch.LibraryResource" \
+    -Dpath="/library" \
+    -Dextensions="resteasy,hibernate-orm-panache,hibernate-search-orm-elasticsearch,resteasy-jackson,jdbc-postgresql"
+cd hibernate-search-orm-elasticsearch-quickstart
+----
+
+This command generates a Maven structure importing the following extensions:
+
+ * Hibernate ORM with Panache,
+ * the PostgreSQL JDBC driver,
+ * Hibernate Search + Elasticsearch,
+ * RESTEasy and Jackson.
+
+If you already have your Quarkus project configured, you can add the `hibernate-search-orm-elasticsearch` extension
+to your project by running the following command in your project base directory:
+
+[source,bash]
+----
+./mvnw quarkus:add-extension -Dextensions="hibernate-search-orm-elasticsearch"
+----
+
+This will add the following to your `pom.xml`:
+
+[source,xml]
+----
+<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-hibernate-search-orm-elasticsearch</artifactId>
+</dependency>
+----
+
+[IMPORTANT]
+====
+For now, let's delete the two generated tests `LibraryResourceTest` and `NativeLibraryResourceIT` present in `src/test/java`.
+If you are interested in how you can test this application, just refer to the solution in the quickstarts Git repository:
+it contains a lot of tests and the required testing infrastructure.
+====
+
+== Creating the bare entities
+
+First, let's create our Hibernate ORM entities `Book` and `Author` in the `model` subpackage.
+
+[source,java]
+----
+package org.acme.hibernate.search.elasticsearch.model;
+
+import java.util.List;
+import java.util.Objects;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.OneToMany;
+
+import io.quarkus.hibernate.orm.panache.PanacheEntity;
+
+@Entity
+public class Author extends PanacheEntity { // <1>
+
+    public String firstName;
+
+    public String lastName;
+
+    @OneToMany(mappedBy = "author", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) // <2>
+    public List<Book> books;
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof Author)) {
+            return false;
+        }
+
+        Author other = (Author) o;
+
+        return Objects.equals(id, other.id);
+    }
+
+    @Override
+    public int hashCode() {
+        return 31;
+    }
+}
+----
+<1> We are using Hibernate ORM with Panache, but it is not mandatory.
+<2> We are loading these elements eagerly so that they are present in the JSON output.
+In a real world application, you should probably use a DTO approach; a minimal sketch of what that could look like follows this listing.
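+
+Below is a minimal, hypothetical sketch of such a DTO (it is not part of the quickstart, and the class name is ours): it copies only the fields we want to expose and keeps the entity, in particular its `books` association, out of the JSON payload.
+
+[source,java]
+----
+package org.acme.hibernate.search.elasticsearch.model;
+
+public class AuthorDto {
+
+    public Long id;
+    public String firstName;
+    public String lastName;
+
+    // Copy only the fields we want to expose in the REST layer.
+    public static AuthorDto from(Author author) {
+        AuthorDto dto = new AuthorDto();
+        dto.id = author.id;
+        dto.firstName = author.firstName;
+        dto.lastName = author.lastName;
+        return dto;
+    }
+}
+----
+
+Next, here is the `Book` entity: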
+ +[source,java] +---- +package org.acme.hibernate.search.elasticsearch.model; + +import java.util.Objects; + +import javax.persistence.Entity; +import javax.persistence.ManyToOne; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +import io.quarkus.hibernate.orm.panache.PanacheEntity; + +@Entity +public class Book extends PanacheEntity { + + public String title; + + @ManyToOne + @JsonIgnore <1> + public Author author; + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof Book)) { + return false; + } + + Book other = (Book) o; + + return Objects.equals(id, other.id); + } + + @Override + public int hashCode() { + return 31; + } +} +---- +<1> We mark this property with `@JsonIgnore` to avoid infinite loops when serializing with Jackson. + +== Initializing the REST service + +While everything is not yet set up for our REST service, we can initialize it with the standard CRUD operations we will need. + +Just copy this content in the `LibraryResource` file created by the Maven `create-project` command: + +[source,java] +---- +package org.acme.hibernate.search.elasticsearch; + +import javax.transaction.Transactional; +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import org.acme.hibernate.search.elasticsearch.model.Author; +import org.acme.hibernate.search.elasticsearch.model.Book; +import org.jboss.resteasy.annotations.jaxrs.FormParam; +import org.jboss.resteasy.annotations.jaxrs.PathParam; + +@Path("/library") +public class LibraryResource { + + @PUT + @Path("book") + @Transactional + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public void addBook(@FormParam String title, @FormParam Long authorId) { + Author author = Author.findById(authorId); + if (author == null) { + return; + } + + Book book = new Book(); + book.title = title; + book.author = author; + book.persist(); + + author.books.add(book); + author.persist(); + } + + @DELETE + @Path("book/{id}") + @Transactional + public void deleteBook(@PathParam Long id) { + Book book = Book.findById(id); + if (book != null) { + book.author.books.remove(book); + book.delete(); + } + } + + @PUT + @Path("author") + @Transactional + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public void addAuthor(@FormParam String firstName, @FormParam String lastName) { + Author author = new Author(); + author.firstName = firstName; + author.lastName = lastName; + author.persist(); + } + + @POST + @Path("author/{id}") + @Transactional + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public void updateAuthor(@PathParam Long id, @FormParam String firstName, @FormParam String lastName) { + Author author = Author.findById(id); + if (author == null) { + return; + } + author.firstName = firstName; + author.lastName = lastName; + author.persist(); + } + + @DELETE + @Path("author/{id}") + @Transactional + public void deleteAuthor(@PathParam Long id) { + Author author = Author.findById(id); + if (author != null) { + author.delete(); + } + } +} +---- + +Nothing out of the ordinary here: it is just good old Hibernate ORM with Panache operations in a standard JAX-RS service. + +In fact, the interesting part is that we will need to add very few elements to make our full text search application working. + +== Using Hibernate Search annotations + +Let's go back to our entities. 
+
+Enabling full text search capabilities for them is as simple as adding a few annotations.
+
+Let's edit the `Book` entity again to include this content:
+
+[source,java]
+----
+package org.acme.hibernate.search.elasticsearch.model;
+
+import java.util.Objects;
+
+import javax.persistence.Entity;
+import javax.persistence.ManyToOne;
+
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+import io.quarkus.hibernate.orm.panache.PanacheEntity;
+
+@Entity
+@Indexed // <1>
+public class Book extends PanacheEntity {
+
+    @FullTextField(analyzer = "english") // <2>
+    public String title;
+
+    @ManyToOne
+    @JsonIgnore
+    public Author author;
+
+    // Preexisting equals()/hashCode() methods
+}
+----
+<1> First, let's use the `@Indexed` annotation to register our `Book` entity as part of the full text index.
+<2> The `@FullTextField` annotation declares a field in the index specifically tailored for full text search.
+In particular, we have to define an analyzer to split and analyze the tokens (~ words) - more on this later.
+
+Now that our books are indexed, we can do the same for the authors.
+
+Open the `Author` class and include the content below.
+
+Things are quite similar here: we use the `@Indexed`, `@FullTextField` and `@KeywordField` annotations.
+
+There are a few differences/additions though. Let's check them out.
+
+[source,java]
+----
+package org.acme.hibernate.search.elasticsearch.model;
+
+import java.util.List;
+import java.util.Objects;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.OneToMany;
+
+import org.hibernate.search.engine.backend.types.Sortable;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.KeywordField;
+
+import io.quarkus.hibernate.orm.panache.PanacheEntity;
+
+@Entity
+@Indexed
+public class Author extends PanacheEntity {
+
+    @FullTextField(analyzer = "name") // <1>
+    @KeywordField(name = "firstName_sort", sortable = Sortable.YES, normalizer = "sort") // <2>
+    public String firstName;
+
+    @FullTextField(analyzer = "name")
+    @KeywordField(name = "lastName_sort", sortable = Sortable.YES, normalizer = "sort")
+    public String lastName;
+
+    @OneToMany(mappedBy = "author", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER)
+    @IndexedEmbedded // <3>
+    public List<Book> books;
+
+    // Preexisting equals()/hashCode() methods
+}
+----
+<1> We use a `@FullTextField` similar to what we did for `Book` but you'll notice that the analyzer is different - more on this later.
+<2> As you can see, we can define several fields for the same property.
+Here, we define a `@KeywordField` with a specific name.
+The main difference is that a keyword field is not tokenized (the string is kept as one single token) but can be normalized (i.e. filtered) - more on this later.
+This field is marked as sortable as our intention is to use it for sorting our authors.
+<3> The purpose of `@IndexedEmbedded` is to include the `Book` fields into the `Author` index.
+In this case, we just use the default configuration: all the fields of the associated `Book` entities are included in the index (i.e. the `title` field). +The nice thing with `@IndexedEmbedded` is that it is able to automatically reindex an `Author` if one of its ``Book``s has been updated thanks to the bidirectional relation. +`@IndexedEmbedded` also supports nested documents (using the `storage = NESTED` attribute) but we don't need it here. +You can also specify the fields you want to include in your parent index using the `includePaths` attribute if you don't want them all. + +== Analyzers and normalizers + +=== Introduction + +Analysis is a big part of full text search: it defines how text will be processed when indexing or building search queries. + +The role of analyzers is to split the text into tokens (~ words) and filter them (making it all lowercase and removing accents for instance). + +Normalizers are a special type of analyzers that keeps the input as a single token. +It is especially useful for sorting or indexing keywords. + +There are a lot of bundled analyzers but you can also develop your own for your own specific purposes. + +You can learn more about the Elasticsearch analysis framework in the https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis.html[Analysis section of the Elasticsearch documentation]. + +=== Defining the analyzers used + +When we added the Hibernate Search annotations to our entities, we defined the analyzers and normalizers used. +Typically: + +[source,java] +---- +@FullTextField(analyzer = "english") +---- + +[source,java] +---- +@FullTextField(analyzer = "name") +---- + +[source,java] +---- +@KeywordField(name = "lastName_sort", sortable = Sortable.YES, normalizer = "sort") +---- + +We use: + + * an analyzer called `name` for person names, + * an analyzer called `english` for book titles, + * a normalizer called `sort` for our sort fields + +but we haven't set them up yet. + +Let's see how you can do it with Hibernate Search. + +[[analysis-configurer]] +=== Setting up the analyzers + +It is an easy task, we just need to create an implementation of `ElasticsearchAnalysisConfigurer` +(and configure Quarkus to use it, more on that later). + +To fulfill our requirements, let's create the following implementation: + +[source,java] +---- +package org.acme.hibernate.search.elasticsearch.config; + +import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurationContext; +import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurer; + +import javax.enterprise.context.Dependent; +import javax.inject.Named; + +@Dependent +@Named("myAnalysisConfigurer") // <1> +public class AnalysisConfigurer implements ElasticsearchAnalysisConfigurer { + + @Override + public void configure(ElasticsearchAnalysisConfigurationContext context) { + context.analyzer("name").custom() // <2> + .tokenizer("standard") + .tokenFilters("asciifolding", "lowercase"); + + context.analyzer("english").custom() // <3> + .tokenizer("standard") + .tokenFilters("asciifolding", "lowercase", "porter_stem"); + + context.normalizer("sort").custom() // <4> + .tokenFilters("asciifolding", "lowercase"); + } +} +---- +<1> We will need to reference the configurer from the configuration properties, so we make it a named bean. +<2> This is a simple analyzer separating the words on spaces, removing any non-ASCII characters by its ASCII counterpart (and thus removing accents) and putting everything in lowercase. 
+
+It is used in our examples for the author's names.
+<3> We are a bit more aggressive with this one and we include some stemming: we will be able to search for `mystery` and get a result even if the indexed input contains `mysteries`.
+It is definitely too aggressive for person names but it is perfect for the book titles.
+<4> Here is the normalizer used for sorting. Very similar to our first analyzer, except we don't tokenize the words as we want one and only one token.
+
+== Adding full text capabilities to our REST service
+
+In our existing `LibraryResource`, we just need to inject the `SearchSession`:
+
+[source,java]
+----
+    @Inject
+    SearchSession searchSession; // <1>
+----
+<1> Inject a Hibernate Search session, which relies on the `EntityManager` under the hood.
+Applications with multiple persistence units can use the CDI qualifier `@io.quarkus.hibernate.orm.PersistenceUnit`
+to select the right one:
+see <<multiple-persistence-units-attaching-cdi>>.
+
+And then we can add the following methods (and a few ``import``s):
+
+[source,java]
+----
+    @Transactional // <1>
+    void onStart(@Observes StartupEvent ev) throws InterruptedException { // <2>
+        // only reindex if we imported some content
+        if (Book.count() > 0) {
+            searchSession.massIndexer()
+                    .startAndWait();
+        }
+    }
+
+    @GET
+    @Path("author/search") // <3>
+    @Transactional
+    public List<Author> searchAuthors(@QueryParam String pattern, // <4>
+            @QueryParam Optional<Integer> size) {
+        return searchSession.search(Author.class) // <5>
+                .where(f ->
+                    pattern == null || pattern.trim().isEmpty() ?
+                            f.matchAll() : // <6>
+                            f.simpleQueryString()
+                                    .fields("firstName", "lastName", "books.title").matching(pattern) // <7>
+                )
+                .sort(f -> f.field("lastName_sort").then().field("firstName_sort")) // <8>
+                .fetchHits(size.orElse(20)); // <9>
+    }
+----
+<1> Important point: we need a transactional context for these methods.
+<2> As we will import data into the PostgreSQL database using an SQL script, we need to reindex the data at startup.
+For this, we use Hibernate Search's mass indexer, which allows you to index a lot of data efficiently (you can fine-tune it for better performance).
+All the upcoming updates coming through Hibernate ORM operations will be synchronized automatically to the full text index.
+If you don't import data manually in the database, you don't need that:
+the mass indexer should then only be used when you change your indexing configuration (adding a new field, changing an analyzer's configuration...) and you want the new configuration to be applied to your existing entities.
+<3> This is where the magic begins: just adding the annotations to our entities makes them available for full text search: we can now query the index using the Hibernate Search DSL.
+<4> Use the `org.jboss.resteasy.annotations.jaxrs.QueryParam` annotation type to avoid repeating the parameter name.
+<5> We indicate that we are searching for ``Author``s.
+<6> We create a predicate: if the pattern is empty, we use a `matchAll()` predicate.
+<7> If we have a valid pattern, we create a https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html[`simpleQueryString()`] predicate on the `firstName`, `lastName` and `books.title` fields matching our pattern.
+<8> We define the sort order of our results. Here we sort by last name, then by first name. Note that we use the specific fields we created for sorting.
+<9> Fetch the `size` top hits, `20` by default. Obviously, paging is also supported: see the sketch just after these callouts.
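+
+As a quick illustration of that paging, here is a minimal, hypothetical variant of the search method (the method name is ours, it is not part of the quickstart): it uses `fetch(offset, limit)` instead of `fetchHits(...)` so that the total hit count is also available.
+
+[source,java]
+----
+    // Hypothetical example: fetch the requested page of authors, 20 hits per page.
+    public List<Author> searchAuthorsPage(int page) {
+        SearchResult<Author> result = searchSession.search(Author.class)
+                .where(f -> f.matchAll())
+                .sort(f -> f.field("lastName_sort").then().field("firstName_sort"))
+                .fetch(page * 20, 20); // fetch(offset, limit) instead of fetchHits(limit)
+
+        // result.total().hitCount() is the total number of matching authors,
+        // which you can use to compute the number of pages.
+        return result.hits();
+    }
+----
+
+`SearchResult` here is `org.hibernate.search.engine.search.query.SearchResult`.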
+
+[NOTE]
+====
+The Hibernate Search DSL supports a significant subset of the Elasticsearch predicates (match, range, nested, phrase, spatial...).
+Feel free to explore the DSL using autocompletion.
+
+When that's not enough, you can always fall back to
+link:{hibernate-search-doc-prefix}#search-dsl-predicate-extensions-elasticsearch-from-json[defining a predicate using JSON directly].
+====
+
+== Configuring the application
+
+As usual, we can configure everything in the Quarkus configuration file, `application.properties`.
+
+Edit `src/main/resources/application.properties` and inject the following configuration:
+
+[source,properties]
+----
+quarkus.ssl.native=false <1>
+
+quarkus.datasource.db-kind=postgresql <2>
+quarkus.datasource.username=quarkus_test
+quarkus.datasource.password=quarkus_test
+quarkus.datasource.jdbc.url=jdbc:postgresql:quarkus_test
+
+quarkus.hibernate-orm.database.generation=drop-and-create <3>
+quarkus.hibernate-orm.sql-load-script=import.sql <4>
+
+quarkus.hibernate-search-orm.elasticsearch.version=7 <5>
+quarkus.hibernate-search-orm.elasticsearch.analysis.configurer=bean:myAnalysisConfigurer <6>
+quarkus.hibernate-search-orm.schema-management.strategy=drop-and-create <7>
+quarkus.hibernate-search-orm.automatic-indexing.synchronization.strategy=sync <8>
+----
+<1> We won't use SSL so we disable it to have a more compact native executable.
+<2> Let's create a PostgreSQL datasource.
+<3> We will drop and recreate the schema every time we start the application.
+<4> We load some initial data.
+<5> We need to tell Hibernate Search about the version of Elasticsearch we will use.
+It is important because there are significant differences between Elasticsearch mapping syntax depending on the version.
+Since the mapping is created at build time to reduce startup time, Hibernate Search cannot connect to the cluster to automatically detect the version.
+<6> We point to the custom `AnalysisConfigurer` which defines the configuration of our analyzers and normalizers.
+<7> Obviously, this is not for production: we drop and recreate the index every time we start the application.
+<8> This means that we wait for the entities to be searchable before considering a write complete.
+On a production setup, the `write-sync` default will provide better performance.
+Using `sync` is especially important when testing as you need the entities to be searchable immediately.
+
+[TIP]
+For more information about the Hibernate Search extension configuration please refer to the <<configuration-reference,Configuration Reference>>.
+
+== Creating a frontend
+
+Now let's add a simple web page to interact with our `LibraryResource`.
+Quarkus automatically serves static resources located under the `META-INF/resources` directory.
+In the `src/main/resources/META-INF/resources` directory, overwrite the existing `index.html` file with the content from this
+{quickstarts-blob-url}/hibernate-search-orm-elasticsearch-quickstart/src/main/resources/META-INF/resources/index.html[index.html] file.
+
+== Automatic import script
+
+For the purpose of this demonstration, let's import an initial dataset.
+ +Let's create a `src/main/resources/import.sql` file with the following content: + +[source,sql] +---- +INSERT INTO author(id, firstname, lastname) VALUES (nextval('hibernate_sequence'), 'John', 'Irving'); +INSERT INTO author(id, firstname, lastname) VALUES (nextval('hibernate_sequence'), 'Paul', 'Auster'); + +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The World According to Garp', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The Hotel New Hampshire', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The Cider House Rules', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'A Prayer for Owen Meany', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Last Night in Twisted River', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'In One Person', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Avenue of Mysteries', 1); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The New York Trilogy', 2); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Mr. Vertigo', 2); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'The Brooklyn Follies', 2); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Invisible', 2); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), 'Sunset Park', 2); +INSERT INTO book(id, title, author_id) VALUES (nextval('hibernate_sequence'), '4 3 2 1', 2); +---- + +== Preparing the infrastructure + +We need a PostgreSQL instance and an Elasticsearch cluster. + +Let's use Docker to start one of each: + +[source,bash,subs=attributes+] +---- +docker run -it --rm=true --name elasticsearch_quarkus_test -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch-oss:{elasticsearch-version} +---- + +[source,bash] +---- +docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name postgresql_quarkus_test -e POSTGRES_USER=quarkus_test -e POSTGRES_PASSWORD=quarkus_test -e POSTGRES_DB=quarkus_test -p 5432:5432 postgres:13.1 +---- + +== Time to play with your application + +You can now interact with your REST service: + + * start Quarkus with `./mvnw compile quarkus:dev` + * open a browser to `http://localhost:8080/` + * search for authors or book titles (we initialized some data for you) + * create new authors and books and search for them too + +As you can see, all your updates are automatically synchronized to the Elasticsearch cluster. + +[[multiple-persistence-units]] +== Multiple persistence units + +=== Configuring multiple persistence units + +With the Hibernate ORM extension, +link:hibernate-orm#multiple-persistence-units[you can set up multiple persistence units], +each with its own datasource and configuration. + +If you do declare multiple persistence units, +you will also configure Hibernate Search separately for each persistence unit. + +The properties at the root of the `quarkus.hibernate-search-orm.` namespace define the default persistence unit. +For instance, the following snippet defines a default datasource and a default persistence unit, +and sets the Elasticsearch host for that persistence unit to `es1.mycompany.com:9200`. 
+ +[source,properties] +---- +quarkus.datasource.db-kind=h2 +quarkus.datasource.jdbc.url=jdbc:h2:mem:default;DB_CLOSE_DELAY=-1 + +quarkus.hibernate-orm.dialect=org.hibernate.dialect.H2Dialect + +quarkus.hibernate-search-orm.elasticsearch.hosts=es1.mycompany.com:9200 +quarkus.hibernate-search-orm.elasticsearch.version=7 +quarkus.hibernate-search-orm.automatic-indexing.synchronization.strategy=write-sync +---- + +Using a map based approach, it is also possible to configure named persistence units: + +[source,properties] +---- +quarkus.datasource."users".db-kind=h2 <1> +quarkus.datasource."users".jdbc.url=jdbc:h2:mem:users;DB_CLOSE_DELAY=-1 + +quarkus.datasource."inventory".db-kind=h2 <2> +quarkus.datasource."inventory".jdbc.url=jdbc:h2:mem:inventory;DB_CLOSE_DELAY=-1 + +quarkus.hibernate-orm."users".datasource=users <3> +quarkus.hibernate-orm."users".packages=org.acme.model.user + +quarkus.hibernate-orm."inventory".datasource=inventory <4> +quarkus.hibernate-orm."inventory".packages=org.acme.model.inventory + +quarkus.hibernate-search-orm."users".elasticsearch.hosts=es1.mycompany.com:9200 <5> +quarkus.hibernate-search-orm."users".elasticsearch.version=7 +quarkus.hibernate-search-orm."users".automatic-indexing.synchronization.strategy=write-sync + +quarkus.hibernate-search-orm."inventory".elasticsearch.hosts=es2.mycompany.com:9200 <6> +quarkus.hibernate-search-orm."inventory".elasticsearch.version=7 +quarkus.hibernate-search-orm."inventory".automatic-indexing.synchronization.strategy=write-sync +---- +<1> Define a datasource named `users`. +<2> Define a datasource named `inventory`. +<3> Define a persistence unit called `users` pointing to the `users` datasource. +<4> Define a persistence unit called `inventory` pointing to the `inventory` datasource. +<5> Configure Hibernate Search for the `users` persistence unit, +setting the Elasticsearch host for that persistence unit to `es1.mycompany.com:9200`. +<6> Configure Hibernate Search for the `inventory` persistence unit, +setting the Elasticsearch host for that persistence unit to `es2.mycompany.com:9200`. + +[[multiple-persistence-units-attaching-model-classes]] +=== Attaching model classes to persistence units + +For each persistence unit, Hibernate Search will only consider indexed entities that are attached to that persistence unit. +Entities are attached to a persistence unit by +link:hibernate-orm#multiple-persistence-units-attaching-model-classes[configuring the Hibernate ORM extension]. + +[[multiple-persistence-units-attaching-cdi]] +=== CDI integration + +You can inject Hibernate Search's main entry points, `SearchSession` and `SearchMapping`, using CDI: + +[source,java] +---- +@Inject +SearchSession searchSession; +---- + +This will inject the `SearchSession` of the default persistence unit. + +To inject the `SearchSession` of a named persistence unit (`users` in our example), +just add a qualifier: + +[source,java] +---- +@Inject +@PersistenceUnit("users") <1> +SearchSession searchSession; +---- +<1> This is the `@io.quarkus.hibernate.orm.PersistenceUnit` annotation. + +You can inject the `SearchMapping` of a named persistence unit using the exact same mechanism: + +[source,java] +---- +@Inject +@PersistenceUnit("users") +SearchMapping searchMapping; +---- + +== Building a native executable + +You can build a native executable with the usual command `./mvnw package -Pnative`. + +[NOTE] +==== +As usual with native executable compilation, this operation consumes a lot of memory. 
+ +It might be safer to stop the two containers while you are building the native executable and start them again once you are done. +==== + +Running it is as simple as executing `./target/hibernate-search-orm-elasticsearch-quickstart-1.0.0-SNAPSHOT-runner`. + +You can then point your browser to `http://localhost:8080/` and use your application. + +[NOTE] +==== +The startup is a bit slower than usual: it is mostly due to us dropping and recreating the database schema and the Elasticsearch mapping every time at startup. +We also inject some data and execute the mass indexer. + +In a real life application, it is obviously something you won't do at startup. +==== + +== Further reading + +If you are interested in learning more about Hibernate Search 6, +the Hibernate team publishes link:{hibernate-search-doc-prefix}[an extensive reference documentation]. + +== FAQ + +=== Why Elasticsearch only? + +Hibernate Search supports both a Lucene backend and an Elasticsearch backend. + +In the context of Quarkus and to build microservices, we thought the latter would make more sense. +Thus we focused our efforts on it. + +We don't have plans to support the Lucene backend in Quarkus for now. + +[[configuration-reference]] +== Hibernate Search Configuration Reference + +include::{generated-dir}/config/quarkus-hibernate-search-orm-elasticsearch.adoc[leveloffset=+1, opts=optional] + +[NOTE] +[[bean-reference-note-anchor]] +.About bean references +==== +When referencing beans using a string value in configuration properties, that string is parsed. + +Here are the most common formats: + +* `bean:` followed by the name of a `@Named` CDI bean. +For example `bean:myBean`. +* `class:` followed by the fully-qualified name of a class, to be instantiated through CDI if it's a CDI bean, +or through its public, no-argument constructor otherwise. +For example `class:com.mycompany.MyClass`. +* An arbitrary string referencing a built-in implementation. +Available values are detailed in the documentation of each configuration property, +such as `async`/`read-sync`/`write-sync`/`sync` for +<>. + +Other formats are also accepted, but are only useful for advanced use cases. +See link:{hibernate-search-doc-prefix}#configuration-bean-reference-parsing[this section of Hibernate Search's reference documentation] +for more information. +==== diff --git a/docs/src/main/asciidoc/http-reference.adoc b/docs/src/main/asciidoc/http-reference.adoc index b10f5dbe9a9b5..5ff491113760d 100644 --- a/docs/src/main/asciidoc/http-reference.adoc +++ b/docs/src/main/asciidoc/http-reference.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - HTTP Reference @@ -67,6 +67,7 @@ will be served relative to `{quarkus.http.root-path}/{quarkus.servlet.context-pa If REST Assured is used for testing and `quarkus.http.root-path` is set then Quarkus will automatically configure the base URL for use in Quarkus tests, so test URL's should not include the root path. +[[ssl]] == Supporting secure connections with SSL In order to have Quarkus support secure connections, you must either provide a certificate and associated key file, or supply a keystore. 
@@ -101,7 +102,7 @@ As with the certificate/key file combination, Quarkus will first try to resolve Add the following property to your `application.properties`: -[source,shell] +[source,bash] ---- quarkus.http.ssl.certificate.key-store-file=/path/to/keystore ---- @@ -155,6 +156,19 @@ If you want to disable HTTP/2 you can set: quarkus.http.http2=false ---- +== Listening on a Random Port + +If you don't want to specify a port you can set `quarkus.http.port=0` or `quarkus.http.test-port=0`. A random open port +will be picked by the OS, and a log message printed in the console. When the port is bound the `quarkus.http.port` system +property will be set to the actual port that was selected, so you can use this to get the actual port number from inside +the application. If you are in a test you can inject the URL normally and this will be configured with the actual port, +and REST Assured will also be configured appropriately. + +WARNING: As this sets a system property you can access `quarkus.http.port` via MicroProfile Config, however if you use +injection the injected value may not always be correct. This port allocation is one of the last things to happen in +Quarkus startup, so if your object that is being injected is created eagerly before the port has opened the injected +value will not be correct. + == CORS filter link:https://en.wikipedia.org/wiki/Cross-origin_resource_sharing[Cross-origin resource sharing] (CORS) is a mechanism that @@ -175,13 +189,13 @@ following properties will be applied before passing the request on to its actual [cols=" authors; + private final BigDecimal price; - public Book(String title, String description, int publicationYear, Set authors) { + public Book(String title, String description, int publicationYear, Set authors, BigDecimal price) { this.title = Objects.requireNonNull(title); this.description = Objects.requireNonNull(description); this.publicationYear = publicationYear; this.authors = Objects.requireNonNull(authors); + this.price = price; } // Getter/Setter/equals/hashCode/toString omitted } @@ -144,6 +146,24 @@ Here is an example of how the preceding classes should be changed: return authors; } ---- + +.BigDecimalAdapter.java +[source,java] +---- +@ProtoAdapter(BigDecimal.class) +public class BigDecimalAdapter { + @ProtoFactory + BigDecimal create(Double value) { + return BigDecimal.valueOf(value); + } + + @ProtoField(number = 1, type = Type.DOUBLE, defaultValue = "0") + Double value(BigDecimal value) { + return value.doubleValue(); + } +} +---- + If your classes have only mutable fields, then the `ProtoFactory` annotation is not required, assuming your class has a no arg constructor. 
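+
+For illustration, here is a minimal, hypothetical sketch of such a mutable class (the class is ours, it is not taken from this guide): ProtoStream can create it through the no-arg constructor and then populate the annotated fields directly, so no `@ProtoFactory` method is needed.
+
+[source,java]
+----
+import org.infinispan.protostream.annotations.ProtoField;
+
+public class Review {
+
+    @ProtoField(number = 1)
+    public String reviewer;
+
+    @ProtoField(number = 2)
+    public String comment;
+
+    // The implicit public no-arg constructor is enough for ProtoStream.
+}
+----
+
+Such a class would still need to be listed in the `includeClasses` of the `AutoProtoSchemaBuilder` interface, just like the other classes below.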
@@ -153,7 +173,7 @@ on it to specify configuration settings .BookContextInitializer.java [source,java] ---- -@AutoProtoSchemaBuilder(includeClasses = { Book.class, Author.class }, schemaPackageName = "book_sample") +@AutoProtoSchemaBuilder(includeClasses = { Book.class, Author.class, BigDecimalAdapter.class }, schemaPackageName = "book_sample") interface BookContextInitializer extends SerializationContextInitializer { } ---- @@ -185,8 +205,8 @@ message Book { required string title = 1; required string description = 2; required int32 publicationYear = 3; // no native Date type available in Protobuf - repeated Author authors = 4; + requited double price = 5; // no native BigDecimal type available in Protobuf } message Author { @@ -211,6 +231,8 @@ message Author { " required int32 publicationYear = 3; // no native Date type available in Protobuf\n" + "\n" + " repeated Author authors = 4;\n" + + "\n" + + " required double price = 5; // no native BigDecimal type available in Protobuf\n" + "}\n" + "\n" + "message Author {\n" + @@ -279,6 +301,7 @@ public class BookMarshaller implements MessageMarshaller { writer.writeString("description", book.getDescription()); writer.writeInt("publicationYear", book.getPublicationYear()); writer.writeCollection("authors", book.getAuthors(), Author.class); + writer.writeDouble("price", book.getPrice().doubleValue()); } @Override @@ -287,7 +310,8 @@ public class BookMarshaller implements MessageMarshaller { String description = reader.readString("description"); int publicationYear = reader.readInt("publicationYear"); Set authors = reader.readCollection("authors", new HashSet<>(), Author.class); - return new Book(title, description, publicationYear, authors); + BigDecimal price = BigDecimal.valueOf(reader.readDouble("price")); + return new Book(title, description, publicationYear, authors, price); } } ---- @@ -348,7 +372,7 @@ Either method of Serialization above will automatically register the schema with startup, meaning that you will automatically gain the ability to query objects stored in the remote Infinispan Server. -You can read more about this at https://infinispan.org/docs/stable/titles/developing/developing.html#query_dsl. +You can read more about https://infinispan.org/docs/stable/titles/developing/developing.html#creating_ickle_queries-querying[querying] in the Infinispan documentation. You can use either the Query DSL or the Ickle Query language with the Quarkus Infinispan client extension. @@ -368,6 +392,8 @@ and you get it with no fuss. You can then use counters as you would normally. CounterManager counterManager; ---- +You can read more about https://infinispan.org/docs/stable/titles/developing/developing.html#clustered_counters[clustered counters] in the Infinispan documentation. + == Near Caching Near caching is disabled by default, but you can enable it by setting the profile config property @@ -380,8 +406,7 @@ a regular expression so that only a subset of caches have near caching applied t Encryption at this point requires additional steps to get working. The first step is to configure the `hotrod-client.properties` file to point to your truststore -and/or keystore. This is further detailed at -https://infinispan.org/docs/dev/user_guide/user_guide.html#hr_encryption. +and/or keystore. This is further detailed https://infinispan.org/docs/stable/titles/hotrod_java/hotrod_java.html#hotrod_encryption[here]. The Infinispan Client extension enables SSL by default. 
You can read more about this at link:native-and-ssl[Using SSL With Native Executables]. @@ -412,7 +437,7 @@ the Quarkus Infinispan Client extension. |=== -The guide for configuring these can be found at https://infinispan.org/docs/dev/user_guide/user_guide.html#authentication. +The guide for configuring these can be found https://infinispan.org/docs/stable/titles/hotrod_java/hotrod_java.html#hotrod_authentication[here]. However you need to configure these through the `hotrod-client.properties` file if using Dependency Injection. == Additional Features diff --git a/docs/src/main/asciidoc/jgit.adoc b/docs/src/main/asciidoc/jgit.adoc index 35939eab6a05f..c357432a345e1 100644 --- a/docs/src/main/asciidoc/jgit.adoc +++ b/docs/src/main/asciidoc/jgit.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - JGit diff --git a/docs/src/main/asciidoc/jms.adoc b/docs/src/main/asciidoc/jms.adoc index 8b5b8806c9068..f49be31e9e1d8 100644 --- a/docs/src/main/asciidoc/jms.adoc +++ b/docs/src/main/asciidoc/jms.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using JMS include::./attributes.adoc[] @@ -51,24 +51,25 @@ We recommend that you follow the instructions in the next sections and create th However, you can go right to the completed example. Clone the Git repository: `git clone https://github.com/amqphub/quarkus-qpid-jms-quickstart.git`, -or download an https://github.com/amqphub/quarkus-qpid-jms-quickstart/archive/master.zip[archive]. - +or download an https://github.com/amqphub/quarkus-qpid-jms-quickstart/archive/main.zip[archive]. === Creating the Maven Project First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=jms-quickstart \ - -Dextensions="qpid-jms" + -DclassName="org.acme.jms.PriceResource" \ + -Dpath="/prices" \ + -Dextensions="resteasy,qpid-jms" cd jms-quickstart ---- This command generates a Maven project, with its pom.xml importing the quarkus-qpid-jms extension: -[source] +[source,xml] ---- org.amqphub.quarkus @@ -82,7 +83,7 @@ This command generates a Maven project, with its pom.xml importing the quarkus-q Then, we need an AMQP broker. In this case we will use an ActiveMQ Artemis server. You can follow the instructions from the https://activemq.apache.org/components/artemis/[Apache Artemis web site] or start a broker via docker: -[source] +[source,bash] ---- docker run -it --rm -p 8161:8161 -p 61616:61616 -p 5672:5672 -e ARTEMIS_USERNAME=quarkus -e ARTEMIS_PASSWORD=quarkus vromero/activemq-artemis:2.11.0-alpine ---- @@ -287,7 +288,7 @@ injecting the ConnectionFactory. This is done in the `src/main/resources/application.properties` file. -[source] +[source,properties] ---- # Configures the Qpid JMS properties. quarkus.qpid-jms.url=amqp://localhost:5672 @@ -303,7 +304,7 @@ More detail about the configuration are available in the https://github.com/amqp If you followed the instructions, you should have the Artemis server running. 
Then, you just need to run the application using: -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -314,22 +315,22 @@ Open `http://localhost:8080/prices.html` in your browser. You can build the native executable with: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative ---- Or, if you don't have GraalVM installed, you can instead use Docker to build the native executable using: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative -Dquarkus.native.container-build=true ---- and then run with: -[source, shell] +[source,bash] ---- -./target/jms-quickstart-1.0-SNAPSHOT-runner +./target/jms-quickstart-1.0.0-SNAPSHOT-runner ---- Open `http://localhost:8080/prices.html` in your browser. @@ -359,17 +360,19 @@ The Artemis JMS solution is located in the `jms-quickstart` {quickstarts-tree-ur Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=jms-quickstart \ - -Dextensions="artemis-jms" + -DclassName="org.acme.jms.PriceResource" \ + -Dpath="/prices" \ + -Dextensions="resteasy,artemis-jms" cd jms-quickstart ---- This creates a Maven project, with its pom.xml importing the quarkus-artemis-jms extension: -[source] +[source,xml] ---- io.quarkus @@ -386,7 +389,7 @@ configuration below instead. We need to configure the Artemis connection properties. This is done in the `src/main/resources/application.properties` file. -[source] +[source,properties] ---- # Configures the Artemis properties. quarkus.artemis.url=tcp://localhost:61616 diff --git a/docs/src/main/asciidoc/kafka-streams.adoc b/docs/src/main/asciidoc/kafka-streams.adoc index cbd149dddeaf3..474774966661a 100644 --- a/docs/src/main/asciidoc/kafka-streams.adoc +++ b/docs/src/main/asciidoc/kafka-streams.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Apache Kafka Streams @@ -32,14 +32,14 @@ Using the dev mode, you'll instantly see messages on the output topic(s) as prod For the best development experience, we recommend applying the following configuration settings to your Kafka broker: -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- group.min.session.timeout.ms=250 ---- Also specify the following settings in your Quarkus `application.properties`: -[source, subs=attributes+] +[source,properties,subs=attributes+] ---- kafka-streams.consumer.session.timeout.ms=250 kafka-streams.consumer.heartbeat.interval.ms=200 @@ -52,7 +52,7 @@ Together, these settings will ensure that the application can very quickly recon In this guide, we are going to generate (random) temperature values in one component (named `generator`). These values are associated to given weather stations and are written in a Kafka topic (`temperature-values`). -Another topic (`weather-stations`) contains just the master data about the weather stations themselves (id and name). +Another topic (`weather-stations`) contains just the main data about the weather stations themselves (id and name). 
A second component (`aggregator`) reads from the two Kafka topics and processes them in a streaming pipeline: @@ -82,12 +82,13 @@ The solution is located in the `kafka-streams-quickstart` {quickstarts-tree-url} First, we need a new project with the temperature value producer. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=kafka-streams-quickstart-producer \ -Dextensions="kafka" \ + -DnoExamples \ && mv kafka-streams-quickstart-producer producer ---- @@ -98,7 +99,7 @@ to your project by running the following command in your project base directory: [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="kafka" +./mvnw quarkus:add-extension -Dextensions="quarkus-smallrye-reactive-messaging-kafka" ---- This will add the following to your `pom.xml`: @@ -122,22 +123,20 @@ package org.acme.kafka.streams.producer.generator; import java.math.BigDecimal; import java.math.RoundingMode; +import java.time.Duration; import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Random; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import javax.enterprise.context.ApplicationScoped; +import io.smallrye.mutiny.Multi; +import io.smallrye.reactive.messaging.kafka.Record; import org.eclipse.microprofile.reactive.messaging.Outgoing; import org.jboss.logging.Logger; -import io.reactivex.Flowable; -import io.smallrye.reactive.messaging.kafka.KafkaRecord; - /** * A bean producing random temperature data every second. * The values are written to a Kafka topic (temperature-values). @@ -164,12 +163,10 @@ public class ValuesGenerator { new WeatherStation(9, "Marrakesh", 20) )); - - @Outgoing("temperature-values") // <1> - public Flowable> generate() { - - return Flowable.interval(500, TimeUnit.MILLISECONDS) // <2> - .onBackpressureDrop() + @Outgoing("temperature-values") // <1> + public Multi> generate() { + return Multi.createFrom().ticks().every(Duration.ofMillis(500)) // <2> + .onOverflow().drop() .map(tick -> { WeatherStation station = stations.get(random.nextInt(stations.size())); double temperature = BigDecimal.valueOf(random.nextGaussian() * 15 + station.averageTemperature) @@ -177,21 +174,19 @@ public class ValuesGenerator { .doubleValue(); LOG.infov("station: {0}, temperature: {1}", station.name, temperature); - return KafkaRecord.of(station.id, Instant.now() + ";" + temperature); + return Record.of(station.id, Instant.now() + ";" + temperature); }); } - @Outgoing("weather-stations") // <3> - public Flowable> weatherStations() { - List> stationsAsJson = stations.stream() - .map(s -> KafkaRecord.of( + @Outgoing("weather-stations") // <3> + public Multi> weatherStations() { + return Multi.createFrom().items(stations.stream() + .map(s -> Record.of( s.id, "{ \"id\" : " + s.id + ", \"name\" : \"" + s.name + "\" }")) - .collect(Collectors.toList()); - - return Flowable.fromIterable(stationsAsJson); - }; + ); + } private static class WeatherStation { @@ -207,9 +202,9 @@ public class ValuesGenerator { } } ---- -<1> Instruct Reactive Messaging to dispatch the items from the returned `Flowable` to `temperature-values`. -<2> The method returns a RX Java 2 _stream_ (`Flowable`) emitting a random temperature value every 0.5 seconds. 
-<3> Instruct Reactive Messaging to dispatch the items from the returned `Flowable` (static list of weather stations) to `weather-stations`. +<1> Instruct Reactive Messaging to dispatch the items from the returned `Multi` to `temperature-values`. +<2> The method returns a Mutiny _stream_ (`Multi`) emitting a random temperature value every 0.5 seconds. +<3> Instruct Reactive Messaging to dispatch the items from the returned `Multi` (static list of weather stations) to `weather-stations`. The two methods each return a _reactive stream_ whose items are sent to the streams named `temperature-values` and `weather-stations`, respectively. @@ -218,7 +213,7 @@ The two methods each return a _reactive stream_ whose items are sent to the stre The two channels are mapped to Kafka topics using the Quarkus configuration file `application.properties`. For that, add the following to the file `producer/src/main/resources/application.properties`: -[source] +[source,properties] ---- # Configure the Kafka broker location kafka.bootstrap.servers=localhost:9092 @@ -241,16 +236,17 @@ With the producer application in place, it's time to implement the actual aggreg which will run the Kafka Streams pipeline. Create another project like so: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=kafka-streams-quickstart-aggregator \ - -Dextensions="kafka-streams,resteasy-jsonb" \ + -Dextensions="resteasy,kafka-streams,resteasy-jackson" \ + -DnoExamples \ && mv kafka-streams-quickstart-aggregator aggregator ---- -This creates the `aggregator` project with the Quarkus extension for Kafka Streams and with RESTEasy support for JSON-B. +This creates the `aggregator` project with the Quarkus extension for Kafka Streams and with RESTEasy support for Jackson. If you already have your Quarkus project configured, you can add the `kafka-streams` extension to your project by running the following command in your project base directory: @@ -385,7 +381,7 @@ import org.apache.kafka.streams.kstream.Produced; import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier; import org.apache.kafka.streams.state.Stores; -import io.quarkus.kafka.client.serialization.JsonbSerde; +import io.quarkus.kafka.client.serialization.ObjectMapperSerde; @ApplicationScoped public class TopologyProducer { @@ -400,9 +396,9 @@ public class TopologyProducer { public Topology buildTopology() { StreamsBuilder builder = new StreamsBuilder(); - JsonbSerde weatherStationSerde = new JsonbSerde<>( + ObjectMapperSerde weatherStationSerde = new ObjectMapperSerde<>( WeatherStation.class); - JsonbSerde aggregationSerde = new JsonbSerde<>(Aggregation.class); + ObjectMapperSerde aggregationSerde = new ObjectMapperSerde<>(Aggregation.class); KeyValueBytesStoreSupplier storeSupplier = Stores.persistentKeyValueStore( WEATHER_STATIONS_STORE); @@ -472,6 +468,8 @@ e.g. via environment variables or system properties. `topics` is specific to Quarkus: the application will wait for all the given topics to exist before launching the Kafka Streams engine. This is to done to gracefully await the creation of topics that don't yet exist at application startup time. +TIP: Alternatively, you can use `kafka.bootstrap.servers` instead of `quarkus.kafka-streams.bootstrap-servers` as you did in the _generator_ project above. + All the properties within the `kafka-streams` namespace are passed through as-is to the Kafka Streams engine. 
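For illustration, here is what such pass-through configuration could look like; the property names below are only examples (any valid Kafka Streams setting can be used this way) and are not required by the quickstart:

[source,properties]
----
# Illustrative pass-through settings: everything after the `kafka-streams.` prefix
# is handed verbatim to the Kafka Streams engine.
kafka-streams.cache.max.bytes.buffering=10240
kafka-streams.metadata.max.age.ms=500
----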
Changing their values requires a rebuild of the application. @@ -479,7 +477,7 @@ Changing their values requires a rebuild of the application. We now can build the `producer` and `aggregator` applications: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -f producer/pom.xml ./mvnw clean package -f aggregator/pom.xml @@ -500,7 +498,7 @@ version: '3.5' services: zookeeper: - image: strimzi/kafka:0.11.3-kafka-2.1.0 + image: strimzi/kafka:0.19.0-kafka-2.5.0 command: [ "sh", "-c", "bin/zookeeper-server-start.sh config/zookeeper.properties" @@ -512,7 +510,7 @@ services: networks: - kafkastreams-network kafka: - image: strimzi/kafka:0.11.3-kafka-2.1.0 + image: strimzi/kafka:0.19.0-kafka-2.5.0 command: [ "sh", "-c", "bin/kafka-server-start.sh config/server.properties --override listeners=$${KAFKA_LISTENERS} --override advertised.listeners=$${KAFKA_ADVERTISED_LISTENERS} --override zookeeper.connect=$${KAFKA_ZOOKEEPER_CONNECT} --override num.partitions=$${KAFKA_NUM_PARTITIONS}" @@ -558,19 +556,21 @@ networks: To launch all the containers, building the `producer` and `aggregator` container images, run `docker-compose up --build`. +TIP: Instead of `QUARKUS_KAFKA_STREAMS_BOOTSTRAP_SERVERS`, you can use `KAFKA_BOOTSTRAP_SERVERS`. + You should see log statements from the `producer` application about messages being sent to the "temperature-values" topic. Now run an instance of the _debezium/tooling_ image, attaching to the same network all the other containers run in. This image provides several useful tools such as _kafkacat_ and _httpie_: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- -docker run --tty --rm -i --network ks debezium/tooling:1.0 +docker run --tty --rm -i --network ks debezium/tooling:1.1 ---- Within the tooling container, run _kafkacat_ to examine the results of the streaming pipeline: -[source, subs=attributes+] +[source,subs=attributes+] ---- kafkacat -b kafka:9092 -C -o beginning -q -t temperatures-aggregated @@ -731,11 +731,9 @@ import java.util.List; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; -import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; @@ -752,8 +750,6 @@ public class WeatherStationEndpoint { @GET @Path("/data/{id}") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) public Response getWeatherStationData(@PathParam("id") int id) { GetWeatherStationDataResult result = interactiveQueries.getWeatherStationData(id); @@ -771,7 +767,7 @@ public class WeatherStationEndpoint { With this code in place, it's time to rebuild the application and the `aggregator` service in Docker Compose: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -f aggregator/pom.xml docker-compose stop aggregator @@ -1021,7 +1017,7 @@ public class WeatherStationEndpoint { Now stop the `aggregator` service again and rebuild it. 
Then let's spin up three instances of it: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- ./mvnw clean package -f aggregator/pom.xml docker-compose stop aggregator @@ -1078,7 +1074,8 @@ http 2af13fe516a9:8080/weather-stations/data/1 If that node holds the data for key "1", you'll get a response like this: -``` +[source] +---- HTTP/1.1 200 OK Connection: keep-alive Content-Length: 74 @@ -1093,23 +1090,25 @@ Date: Tue, 11 Jun 2019 19:16:31 GMT "stationId": 1, "stationName": "Hamburg" } -``` +---- Otherwise, the service will send a redirect: -``` +[source] +---- HTTP/1.1 303 See Other Connection: keep-alive Content-Length: 0 Date: Tue, 18 Jun 2019 20:01:03 GMT Location: http://1eb39af8d587:8080/weather-stations/data/1 -``` +---- You can also have _httpie_ automatically follow the redirect by passing the `--follow option`: -```bash +[source,bash] +---- http --follow 2af13fe516a9:8080/weather-stations/data/1 -``` +---- == Running Natively @@ -1119,7 +1118,7 @@ natively via GraalVM without further configuration. To run both the `producer` and `aggregator` applications in native mode, the Maven builds can be executed using the `native` profile: -[source, shell] +[source,bash] ---- ./mvnw clean package -f producer/pom.xml -Pnative -Dnative-image.container-runtime=docker ./mvnw clean package -f aggregator/pom.xml -Pnative -Dnative-image.container-runtime=docker @@ -1127,7 +1126,7 @@ the Maven builds can be executed using the `native` profile: Now create an environment variable named `QUARKUS_MODE` and with value set to "native": -[source, shell] +[source,bash] ---- export QUARKUS_MODE=native ---- @@ -1136,7 +1135,7 @@ This is used by the Docker Compose file to use the correct `Dockerfile` when bui The Kafka Streams application can work with less than 50 MB RSS in native mode. To do so, add the `Xmx` option to the program invocation in `aggregator/src/main/docker/Dockerfile.native`: -[source, shell] +[source,dockerfile] ---- CMD ["./application", "-Dquarkus.http.host=0.0.0.0", "-Xmx32m"] ---- @@ -1151,12 +1150,12 @@ If you are using the `quarkus-smallrye-health` extension, `quarkus-kafka-streams * a readiness health check to validate that all topics declared in the `quarkus.kafka-streams.topics` property are created, * a liveness health check based on the Kafka Streams state. -So when you access the `/health` endpoint of your application you will have information about the state of the Kafka Streams and the available and/or missing topics. +So when you access the `/q/health` endpoint of your application you will have information about the state of the Kafka Streams and the available and/or missing topics. This is an example of when the status is `DOWN`: [source, subs=attributes+] ---- -curl -i http://aggregator:8080/health +curl -i http://aggregator:8080/q/health HTTP/1.1 503 Service Unavailable content-type: application/json; charset=UTF-8 @@ -1183,8 +1182,8 @@ content-length: 454 ] } ---- -<1> Liveness health check. Also available at `/health/live` endpoint. -<2> Readiness health check. Also available at `/health/ready` endpoint. +<1> Liveness health check. Also available at `/q/health/live` endpoint. +<2> Readiness health check. Also available at `/q/health/ready` endpoint. So as you can see, the status is `DOWN` as soon as one of the `quarkus.kafka-streams.topics` is missing or the Kafka Streams `state` is not `RUNNING`. 
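If the built-in checks do not match your needs, a check with different semantics can be built on the same state information. The sketch below is illustrative only (the package, bean, and check names are invented) and assumes the `KafkaStreams` instance managed by the extension can be injected:

[source,java]
----
package org.acme.kafka.streams.aggregator.health;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import org.apache.kafka.streams.KafkaStreams;
import org.eclipse.microprofile.health.HealthCheck;
import org.eclipse.microprofile.health.HealthCheckResponse;
import org.eclipse.microprofile.health.Liveness;

/**
 * Hypothetical liveness check reporting UP only while the
 * Kafka Streams engine is in a running state.
 */
@Liveness
@ApplicationScoped
public class PipelineStateHealthCheck implements HealthCheck {

    @Inject
    KafkaStreams streams; // instance created by the quarkus-kafka-streams extension

    @Override
    public HealthCheckResponse call() {
        KafkaStreams.State state = streams.state();
        return HealthCheckResponse.named("custom-kafka-streams-state")
                .state(state.isRunning())          // UP while the engine is running
                .withData("state", state.name())   // expose the raw state for debugging
                .build();
    }
}
----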
@@ -1193,13 +1192,15 @@ As well as if no topics are missing, the `missing_topics` key will not be presen You can of course disable the health check of the `quarkus-kafka-streams` extension by setting the `quarkus.kafka-streams.health.enabled` property to `false` in your `application.properties`. -Obviously you can create your liveness and readiness probes based on the respective endpoints `/health/live` and `/health/ready`. +Obviously you can create your liveness and readiness probes based on the respective endpoints `/q/health/live` and `/q/health/ready`. === Liveness health check Here is an example of the liveness check: -``` -curl -i http://aggregator:8080/health/live + +[source] +---- +curl -i http://aggregator:8080/q/health/live HTTP/1.1 503 Service Unavailable content-type: application/json; charset=UTF-8 @@ -1217,14 +1218,16 @@ content-length: 225 } ] } -``` +---- The `state` is coming from the `KafkaStreams.State` enum. === Readiness health check Here is an example of the readiness check: -``` -curl -i http://aggregator:8080/health/ready + +[source] +---- +curl -i http://aggregator:8080/q/health/ready HTTP/1.1 503 Service Unavailable content-type: application/json; charset=UTF-8 @@ -1242,15 +1245,14 @@ content-length: 265 } ] } - -``` +---- == Going Further This guide has shown how you can build stream processing applications using Quarkus and the Kafka Streams APIs, both in JVM and native modes. For running your KStreams application in production, you could also add health checks and metrics for the data pipeline. -Refer to the Quarkus guides on link:microprofile-metrics[metrics] and link:microprofile-health[health checks] to learn more. +Refer to the Quarkus guides on link:micrometer[Micrometer], link:microprofile-metrics[MicroProfile Metrics], and link:microprofile-health[health checks] to learn more. == Configuration Reference diff --git a/docs/src/main/asciidoc/kafka.adoc b/docs/src/main/asciidoc/kafka.adoc index 80b555fba604d..553910a150c30 100644 --- a/docs/src/main/asciidoc/kafka.adoc +++ b/docs/src/main/asciidoc/kafka.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Apache Kafka with Reactive Messaging @@ -43,12 +43,14 @@ The solution is located in the `kafka-quickstart` {quickstarts-tree-url}/kafka-q First, we need a new project. 
Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=kafka-quickstart \ - -Dextensions="kafka" + -DclassName="org.acme.kafka.PriceResource" \ + -Dpath="/prices" \ + -Dextensions="resteasy,smallrye-reactive-messaging-kafka" cd kafka-quickstart ---- @@ -59,7 +61,7 @@ to your project by running the following command in your project base directory: [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="kafka" +./mvnw quarkus:add-extension -Dextensions="smallrye-reactive-messaging-kafka" ---- This will add the following to your `pom.xml`: @@ -84,7 +86,7 @@ version: '2' services: zookeeper: - image: strimzi/kafka:0.11.3-kafka-2.1.0 + image: strimzi/kafka:0.19.0-kafka-2.5.0 command: [ "sh", "-c", "bin/zookeeper-server-start.sh config/zookeeper.properties" @@ -95,7 +97,7 @@ services: LOG_DIR: /tmp/logs kafka: - image: strimzi/kafka:0.11.3-kafka-2.1.0 + image: strimzi/kafka:0.19.0-kafka-2.5.0 command: [ "sh", "-c", "bin/kafka-server-start.sh config/server.properties --override listeners=$${KAFKA_LISTENERS} --override advertised.listeners=$${KAFKA_ADVERTISED_LISTENERS} --override zookeeper.connect=$${KAFKA_ZOOKEEPER_CONNECT}" @@ -123,12 +125,13 @@ Create the `src/main/java/org/acme/kafka/PriceGenerator.java` file, with the fol ---- package org.acme.kafka; -import io.reactivex.Flowable; -import org.eclipse.microprofile.reactive.messaging.Outgoing; +import java.time.Duration; +import java.util.Random; import javax.enterprise.context.ApplicationScoped; -import java.util.Random; -import java.util.concurrent.TimeUnit; + +import io.smallrye.mutiny.Multi; +import org.eclipse.microprofile.reactive.messaging.Outgoing; /** * A bean producing random prices every 5 seconds. @@ -140,15 +143,16 @@ public class PriceGenerator { private Random random = new Random(); @Outgoing("generated-price") // <1> - public Flowable generate() { // <2> - return Flowable.interval(5, TimeUnit.SECONDS) + public Multi generate() { // <2> + return Multi.createFrom().ticks().every(Duration.ofSeconds(5)) + .onOverflow().drop() .map(tick -> random.nextInt(100)); } } ---- <1> Instruct Reactive Messaging to dispatch the items from returned stream to `generated-price`. -<2> The method returns a RX Java 2 _stream_ (`Flowable`) emitting a random _price_ every 5 seconds. +<2> The method returns a Mutiny _stream_ (`Multi`) emitting a random _price_ every 5 seconds. The method returns a _Reactive Stream_. The generated items are sent to the stream named `generated-price`. This stream is mapped to Kafka using the `application.properties` file that we will create soon. 
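As a side note, a channel such as `generated-price` does not have to be fed by a method returning a reactive stream: it can also be fed imperatively through an `Emitter` (use one approach or the other for a given channel). The sketch below is illustrative only and not part of the quickstart; the bean and method names are invented:

[source,java]
----
package org.acme.kafka;

import java.util.concurrent.CompletionStage;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import org.eclipse.microprofile.reactive.messaging.Channel;
import org.eclipse.microprofile.reactive.messaging.Emitter;

/**
 * Hypothetical imperative alternative to the PriceGenerator:
 * pushes a single price to the "generated-price" channel on demand.
 */
@ApplicationScoped
public class ManualPriceSender {

    @Inject
    @Channel("generated-price")
    Emitter<Integer> priceEmitter;

    public CompletionStage<Void> send(int price) {
        // send() completes once the message is acknowledged by the connector
        return priceEmitter.send(price);
    }
}
----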
@@ -163,6 +167,7 @@ Create the `src/main/java/org/acme/kafka/PriceConverter.java` file with the foll package org.acme.kafka; import io.smallrye.reactive.messaging.annotations.Broadcast; +import org.eclipse.microprofile.reactive.messaging.Acknowledgment; import org.eclipse.microprofile.reactive.messaging.Incoming; import org.eclipse.microprofile.reactive.messaging.Outgoing; @@ -177,9 +182,10 @@ public class PriceConverter { private static final double CONVERSION_RATE = 0.88; - @Incoming("prices") // <1> - @Outgoing("my-data-stream") // <2> - @Broadcast // <3> + @Incoming("prices") // <1> + @Outgoing("my-data-stream") // <2> + @Broadcast // <3> + @Acknowledgment(Acknowledgment.Strategy.PRE_PROCESSING) // <4> public double process(int priceInUsd) { return priceInUsd * CONVERSION_RATE; } @@ -189,6 +195,7 @@ public class PriceConverter { <1> Indicates that the method consumes the items from the `prices` topic <2> Indicates that the objects returned by the method are sent to the `my-data-stream` stream <3> Indicates that the item are dispatched to all _subscribers_ +<4> Make sure to acknowledge the incoming message The `process` method is called for every Kafka _record_ from the `prices` topic (configured in the application configuration). Every result is sent to the `my-data-stream` in-memory stream. @@ -247,7 +254,7 @@ The `channel-name` segment must match the value set in the `@Incoming` and `@Out * `generated-price` -> sink in which we write the prices * `prices` -> source in which we read the prices -[source] +[source,properties] ---- # Configure the SmallRye Kafka connector kafka.bootstrap.servers=localhost:9092 @@ -255,6 +262,7 @@ kafka.bootstrap.servers=localhost:9092 # Configure the Kafka sink (we write to it) mp.messaging.outgoing.generated-price.connector=smallrye-kafka mp.messaging.outgoing.generated-price.topic=prices +mp.messaging.incoming.prices.health-readiness-enabled=false mp.messaging.outgoing.generated-price.value.serializer=org.apache.kafka.common.serialization.IntegerSerializer # Configure the Kafka source (we read from it) @@ -266,6 +274,18 @@ More details about this configuration is available on the https://kafka.apache.o NOTE: What about `my-data-stream`? This is an in-memory stream, not connected to a message broker. +[TIP] +.What is "mp.messaging.incoming.prices.health-readiness-enabled=false"? +==== +The `health-readiness-enabled` disables the readiness health check. +By default, it verifies that there is an active connection with the broker. +In our case, the connection only happens when we get the first consumer. +This is because the stream is consumed as an SSE, waiting lazily for the first connection to trigger the whole stream. +So, if you are running in an environment only routing traffic to containers that are _ready_ (such as Kubernetes), it would not send traffic to your application, which, as a consequence, will never connect to Kafka and pass the readiness check. + +More details about health reporting is given in <>. +==== + == The HTML page Final touch, the HTML page reading the converted prices using SSE. @@ -311,7 +331,7 @@ Nothing spectacular here. On each received price, it updates the page. If you followed the instructions, you should have Kafka running. 
Then, you just need to run the application using: -[source, shell] +[source,bash] ---- ./mvnw quarkus:dev ---- @@ -324,7 +344,7 @@ NOTE: If you started the Kafka broker with docker compose, stop it using `CTRL+C You can build the native executable with: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative ---- @@ -377,14 +397,46 @@ The `io.smallrye.reactive.messaging.annotations.Emitter`, `io.smallrye.reactive. The new `Emitter.send` method returns a `CompletionStage` completed when the produced message is acknowledged. ==== -== Kafka Health Check +[[kafka-health-check]] +== Kafka Health Checks + +Quarkus provides several health checks for Kafka. +These checks are used in combination with the `quarkus-smallrye-health` extension. + +When using the `quarkus-kafka` extension, you can enable _readiness_ health check by setting the `quarkus.kafka.health.enabled` property to `true` in your `application.properties`. +This check reports the status of the interaction with a _default_ Kafka broker (configured using `kafka.bootstrap.servers`). +That check requires an _admin connection_ with the Kafka broker. +This check is disabled by default. +If enabled, when you access the `/q/health/ready` endpoint of your application, you will have information about the connection validation status. + +When using Reactive Messaging and the Kafka connector, each configured channel (incoming or outgoing) provides a _liveness_ and _readiness_ check. +The _liveness_ check captures any unrecoverable failure happening during the communication with Kafka. +The _readiness_ check verifies that communication with Kafka is established. +For each channel, you can disable the checks using: + +[source, properties] +---- +# Disable both liveness and readiness checks with `health-enabled=false`: + +# Incoming channel (receiving records form Kafka) +mp.messaging.incoming.your-channel.health-enabled=false +# Outgoing channel (writing records to Kafka) +mp.messaging.outgoing.your-channel.health-enabled=false + +# Disable only the readiness check with `health-readiness-enabled=false`: -If you are using the `quarkus-smallrye-health` extension, `quarkus-kafka` can add a readiness health check -to validate the connection to the broker. This is disabled by default. +mp.messaging.incoming.your-channel.health-readiness-enabled=false +mp.messaging.outgoing.your-channel.health-readiness-enabled=false +---- + +NOTE: You can configure the `bootstrap.servers` for each channel. Defaults is `kafka.bootstrap.servers`. -If enabled, when you access the `/health/ready` endpoint of your application you will have information about the connection validation status. +Reactive Messaging readiness check offers two strategies. +The default strategy verifies that an active connection is established with the broker. +This approach is not intrusive as it's based on built-in metrics. -This behavior can be enabled by setting the `quarkus.kafka.health.enabled` property to `true` in your `application.properties`. +Using the `health-readiness-topic-verification=true` attribute, you can also check the topics used by the application exist in the broker. +Note that, to achieve this, an _admin connection_ is required. 
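Putting this together, an illustrative configuration could look like the following (the `prices` channel comes from the quickstart; enable only what you need):

[source,properties]
----
# Illustrative only: broker-level readiness check provided by the quarkus-kafka extension
quarkus.kafka.health.enabled=true

# Per-channel readiness check that also verifies the topic exists (requires an admin connection)
mp.messaging.incoming.prices.health-readiness-topic-verification=true
----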
== JSON serialization @@ -431,7 +483,7 @@ public class FruitProcessor { @Incoming("fruit-in") @Outgoing("fruit-out") @Broadcast - public double process(Fruit fruit) { + public Fruit process(Fruit fruit) { fruit.price = fruit.price * CONVERSION_RATE; return fruit; } @@ -439,34 +491,34 @@ public class FruitProcessor { } ---- -To do this, we will need to setup JSON serialization with JSON-B or Jackson. +To do this, we will need to setup JSON serialization with Jackson or JSON-B. NOTE: With JSON serialization correctly configured, you can also use `Publisher` and `Emitter`. -=== Serializing via JSON-B +=== Serializing via Jackson -First, you need to include the `quarkus-jsonb` extension (if you already use the `quarkus-resteasy-jsonb` extension, this is not needed). +First, you need to include the `quarkus-jackson` extension (if you already use the `quarkus-resteasy-jackson` extension, this is not needed). [source, xml] ---- io.quarkus - quarkus-jsonb + quarkus-jackson ---- -There is an existing `JsonbSerializer` that can be used to serialize all pojos via JSON-B, +There is an existing `ObjectMapperSerializer` that can be used to serialize all pojos via Jackson, but the corresponding deserializer is generic, so it needs to be subclassed. -So, let's create a `FruitDeserializer` that extends the generic `JsonbDeserializer`. +So, let's create a `FruitDeserializer` that extends the `ObjectMapperDeserializer`. [source,java] ---- -package com.acme.fruit.jsonb; +package com.acme.fruit.jackson; -import io.quarkus.kafka.client.serialization.JsonbDeserializer; +import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer; -public class FruitDeserializer extends JsonbDeserializer { +public class FruitDeserializer extends ObjectMapperDeserializer { public FruitDeserializer(){ // pass the class to the parent. super(Fruit.class); @@ -474,50 +526,47 @@ public class FruitDeserializer extends JsonbDeserializer { } ---- -NOTE: If you don't want to create a deserializer for each of your pojo, you can use the generic `io.vertx.kafka.client.serialization.JsonObjectDeserializer` -that will deserialize to a `javax.json.JsonObject`. The corresponding serializer can also be used: `io.vertx.kafka.client.serialization.JsonObjectSerializer`. - -Finally, configure your streams to use the JSON-B serializer and deserializer. +Finally, configure your streams to use the Jackson serializer and deserializer. [source,properties] ---- # Configure the Kafka source (we read from it) mp.messaging.incoming.fruit-in.connector=smallrye-kafka mp.messaging.incoming.fruit-in.topic=fruit-in -mp.messaging.incoming.fruit-in.value.deserializer=com.acme.fruit.jsonb.FruitDeserializer +mp.messaging.incoming.fruit-in.value.deserializer=com.acme.fruit.jackson.FruitDeserializer # Configure the Kafka sink (we write to it) mp.messaging.outgoing.fruit-out.connector=smallrye-kafka mp.messaging.outgoing.fruit-out.topic=fruit-out -mp.messaging.outgoing.fruit-out.value.serializer=io.quarkus.kafka.client.serialization.JsonbSerializer +mp.messaging.outgoing.fruit-out.value.serializer=io.quarkus.kafka.client.serialization.ObjectMapperSerializer ---- -Now, your Kafka messages will contain a JSON-B serialized representation of your Fruit pojo. +Now, your Kafka messages will contain a Jackson serialized representation of your Fruit pojo. -=== Serializing via Jackson +=== Serializing via JSON-B -First, you need to include the `quarkus-jackson` extension (if you already use the `quarkus-resteasy-jackson` extension, this is not needed). 
+First, you need to include the `quarkus-jsonb` extension (if you already use the `quarkus-resteasy-jsonb` extension, this is not needed). [source, xml] ---- io.quarkus - quarkus-jackson + quarkus-jsonb ---- -There is an existing `ObjectMapperSerializer` that can be used to serialize all pojos via Jackson, +There is an existing `JsonbSerializer` that can be used to serialize all pojos via JSON-B, but the corresponding deserializer is generic, so it needs to be subclassed. -So, let's create a `FruitDeserializer` that extends the `ObjectMapperDeserializer`. +So, let's create a `FruitDeserializer` that extends the generic `JsonbDeserializer`. [source,java] ---- -package com.acme.fruit.jackson; +package com.acme.fruit.jsonb; -import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer; +import io.quarkus.kafka.client.serialization.JsonbDeserializer; -public class FruitDeserializer extends ObjectMapperDeserializer { +public class FruitDeserializer extends JsonbDeserializer { public FruitDeserializer(){ // pass the class to the parent. super(Fruit.class); @@ -525,22 +574,25 @@ public class FruitDeserializer extends ObjectMapperDeserializer { } ---- -Finally, configure your streams to use the Jackson serializer and deserializer. +NOTE: If you don't want to create a deserializer for each of your pojo, you can use the generic `io.vertx.kafka.client.serialization.JsonObjectDeserializer` +that will deserialize to a `javax.json.JsonObject`. The corresponding serializer can also be used: `io.vertx.kafka.client.serialization.JsonObjectSerializer`. + +Finally, configure your streams to use the JSON-B serializer and deserializer. [source,properties] ---- # Configure the Kafka source (we read from it) mp.messaging.incoming.fruit-in.connector=smallrye-kafka mp.messaging.incoming.fruit-in.topic=fruit-in -mp.messaging.incoming.fruit-in.value.deserializer=com.acme.fruit.jackson.FruitDeserializer +mp.messaging.incoming.fruit-in.value.deserializer=com.acme.fruit.jsonb.FruitDeserializer # Configure the Kafka sink (we write to it) mp.messaging.outgoing.fruit-out.connector=smallrye-kafka mp.messaging.outgoing.fruit-out.topic=fruit-out -mp.messaging.outgoing.fruit-out.value.serializer=io.quarkus.kafka.client.serialization.ObjectMapperSerializer +mp.messaging.outgoing.fruit-out.value.serializer=io.quarkus.kafka.client.serialization.JsonbSerializer ---- -Now, your Kafka messages will contain a Jackson serialized representation of your Fruit pojo. +Now, your Kafka messages will contain a JSON-B serialized representation of your Fruit pojo. === Sending JSON Server-Sent Events (SSE) @@ -562,7 +614,7 @@ import javax.ws.rs.core.MediaType; import org.jboss.resteasy.annotations.SseElementType; @Path("/fruits") -public class PriceResource { +public class FruitResource { @Inject @Channel("fruit-out") Publisher fruits; @@ -611,6 +663,475 @@ public class PriceStorage { The complete example is available in the `kafka-panache-quickstart` {quickstarts-tree-url}/kafka-panache-quickstart[directory]. +[NOTE] +==== +There are 2 `@Blocking` annotations: + + 1. `io.smallrye.reactive.messaging.annotations.Blocking` + 2. `io.smallrye.common.annotation.Blocking` + +They have the same effect. +Thus, you can use both. +The first one provides more fine-grain tuning such as the worker pool to use and whether it preserves the order. +The second one, used in also with other reactive features of Quarkus, uses the default worker pool and preserves the order. 
+==== + +== Testing a Kafka application + +=== Testing without a broker + +It can be useful to test the application without having to start a Kafka broker. +To achieve this, you can _switch_ the channels managed by the Kafka connector to _in-memory_. + +IMPORTANT: This approach only works for JVM tests. It cannot be used for native tests (because they do not support injection). + +First, add the following dependency to your application: + +[source, xml] +---- + + io.smallrye.reactive + smallrye-reactive-messaging-in-memory + test + +---- + +Then, create a Quarkus Test Resource as follows: + +[source, java] +---- +public class KafkaTestResourceLifecycleManager implements QuarkusTestResourceLifecycleManager { + + @Override + public Map start() { + Map env = new HashMap<>(); + Map props1 = InMemoryConnector.switchIncomingChannelsToInMemory("orders"); // <1> + Map props2 = InMemoryConnector.switchOutgoingChannelsToInMemory("queue"); // <2> + env.putAll(props1); + env.putAll(props2); + return env; // <3> + } + + @Override + public void stop() { + InMemoryConnector.clear(); // <4> + } +} +---- +1. Switch the incoming channel "orders" (expecting messages from Kafka) to in-memory. +2. Switch the outgoing channel "queue" (writing messages to Kafka) to in-memory. +3. Builds and returns a `Map` containing all the properties required to configure the application to use in-memory channels. +4. When the test stops, clear the `InMemoryConnector` (discard all the received and sent messages) + +Create a Quarkus Test using the test resource created above: + +[source, java] +---- +@QuarkusTest +@QuarkusTestResource(KafkaTestResourceLifecycleManager.class) +class BaristaTest { + + @Inject @Any + InMemoryConnector connector; // <1> + + @Test + void testProcessOrder() { + InMemorySource orders = connector.source("orders"); // <2> + InMemorySink queue = connector.sink("queue"); // <3> + + Order order = new Order(); + order.setProduct("coffee"); + order.setName("Coffee lover"); + order.setOrderId("1234"); + + orders.send(order); // <4> + + await().>>until(queue::received, t -> t.size() == 1); // <5> + + Beverage queuedBeverage = queue.received().get(0).getPayload(); + Assertions.assertEquals(Beverage.State.READY, queuedBeverage.getPreparationState()); + Assertions.assertEquals("coffee", queuedBeverage.getBeverage()); + Assertions.assertEquals("Coffee lover", queuedBeverage.getCustomer()); + Assertions.assertEquals("1234", queuedBeverage.getOrderId()); + } + +} +---- +1. Inject the in-memory connector in your test class. +2. Retrieve the incoming channel (`orders`) - the channel must have been switched to in-memory in the test resource. +3. Retrieve the outgoing channel (`queue`) - the channel must have been switched to in-memory in the test resource. +4. Use the `send` method to send a message to the `orders` channel. So, the application will process this message. +5. Use the `received` method to check the messages produced by the application. + +=== Starting Kafka in a test resource + +Alternatively, you can start a Kafka broker in a test resource. 
+The following snippet shows a test resource starting a Kafka broker using https://www.testcontainers.org/modules/kafka/[Testcontainers]: + +[source, java] +---- +public class KafkaResource implements QuarkusTestResourceLifecycleManager { + + private final KafkaContainer kafka = new KafkaContainer(); + + @Override + public Map start() { + kafka.start(); + return Collections.singletonMap("kafka.bootstrap.servers", kafka.getBootstrapServers()); // <1> + } + + @Override + public void stop() { + kafka.close(); + } +} +---- +1. Configure the Kafka bootstrap location, so the application connects to this broker. + +== Authenticating with OAuth + +If your Kafka broker uses OAuth as authentication mechanism, you need to configure the Kafka consumer to enable this authentication process. +First, add the following dependency to your application: + +[source, xml] +---- + + io.strimzi + kafka-oauth-client + +---- + +This dependency provides the callback handler required to handle the OAuth workflow. +Then, in the `application.properties`, add: + +[source, properties] +---- +mp.messaging.connector.smallrye-kafka.security.protocol=SASL_PLAINTEXT +mp.messaging.connector.smallrye-kafka.sasl.mechanism=OAUTHBEARER +mp.messaging.connector.smallrye-kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + oauth.client.id="team-a-client" \ + oauth.client.secret="team-a-client-secret" \ + oauth.token.endpoint.uri="http://keycloak:8080/auth/realms/kafka-authz/protocol/openid-connect/token" ; +mp.messaging.connector.smallrye-kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler +---- + +Update the `oauth.client.id`, `oauth.client.secret` and `oauth.token.endpoint.uri` values. + +OAuth authentication works for both JVM and native modes. + +== Using Snappy + +On _outgoing_ channels, you can enable Snappy compression by setting the `compression.type` attribute to `snappy`: + +[source, properties] +---- +mp.messaging.outgoing.fruit-out.compression.type=snappy +---- + +In JVM mode, it will work out of the box. +However, to compile your application to a native executable, you need to: + +1. Uses GraalVM 21.+ +2. Add `quarkus.kafka.snappy.enabled=true` to your `application.properties` + +In native mode, Snappy is disabled by default as the use of Snappy requires embedding a native library and unpacking it when the application starts. + +== Configuration + +More details about the SmallRye Reactive Messaging configuration can be found in the https://smallrye.io/smallrye-reactive-messaging/smallrye-reactive-messaging/2.8/kafka/kafka.html[SmallRye Reactive Messaging - Kafka Connector Documentation]. +The most important attributes are listed in the tables below: + +=== Incoming channel configuration (polling from Kafka) + +The following attributes are configured using: + +[source, properties] +---- +mp.messaging.incoming.your-channel-name.attribute=value +---- + +Some properties have aliases which can be configured globally: + +[source, properties] +---- +kafka.bootstrap.servers=... +---- + +.Incoming Attributes of the 'smallrye-kafka' connector +[cols="25, 30, 15, 20",options="header"] +|=== +|Attribute (_alias_) | Description | Mandatory | Default + +| *bootstrap.servers* + +_(kafka.bootstrap.servers)_ | A comma-separated list of host:port to use for establishing the initial connection to the Kafka cluster. + +Type: _string_ | false | `localhost:9092` + +| *topic* | The consumed / populated Kafka topic. 
If neither this property nor the `topics` properties are set, the channel name is used + +Type: _string_ | false | + +| *health-enabled* | Whether health reporting is enabled (default) or disabled + +Type: _boolean_ | false | `true` + +| *health-readiness-enabled* | Whether readiness health reporting is enabled (default) or disabled + +Type: _boolean_ | false | `true` + +| *health-readiness-topic-verification* | Whether the readiness check should verify that topics exist on the broker. Default to false. Enabling it requires an admin connection. + +Type: _boolean_ | false | `false` + +| *health-readiness-timeout* | During the readiness health check, the connector connects to the broker and retrieves the list of topics. This attribute specifies the maximum duration (in ms) for the retrieval. If exceeded, the channel is considered not-ready. + +Type: _long_ | false | `2000` + +| *tracing-enabled* | Whether tracing is enabled (default) or disabled + +Type: _boolean_ | false | `true` + +| *cloud-events* | Enables (default) or disables the Cloud Event support. If enabled on an _incoming_ channel, the connector analyzes the incoming records and try to create Cloud Event metadata. If enabled on an _outgoing_, the connector sends the outgoing messages as Cloud Event if the message includes Cloud Event Metadata. + +Type: _boolean_ | false | `true` + +| *topics* | A comma-separating list of topics to be consumed. Cannot be used with the `topic` or `pattern` properties + +Type: _string_ | false | + +| *pattern* | Indicate that the `topic` property is a regular expression. Must be used with the `topic` property. Cannot be used with the `topics` property + +Type: _boolean_ | false | `false` + +| *key.deserializer* | The deserializer classname used to deserialize the record's key + +Type: _string_ | false | `org.apache.kafka.common.serialization.StringDeserializer` + +| *value.deserializer* | The deserializer classname used to deserialize the record's value + +Type: _string_ | true | + +| *fetch.min.bytes* | The minimum amount of data the server should return for a fetch request. The default setting of 1 byte means that fetch requests are answered as soon as a single byte of data is available or the fetch request times out waiting for data to arrive. + +Type: _int_ | false | `1` + +| *group.id* | A unique string that identifies the consumer group the application belongs to. If not set, a unique, generated id is used + +Type: _string_ | false | + +| *enable.auto.commit* | If enabled, consumer's offset will be periodically committed in the background by the underlying Kafka client, ignoring the actual processing outcome of the records. It is recommended to NOT enable this setting and let Reactive Messaging handles the commit. + +Type: _boolean_ | false | `false` + +| *retry* | Whether or not the connection to the broker is re-attempted in case of failure + +Type: _boolean_ | false | `true` + +| *retry-attempts* | The maximum number of reconnection before failing. 
-1 means infinite retry + +Type: _int_ | false | `-1` + +| *retry-max-wait* | The max delay (in seconds) between 2 reconnects + +Type: _int_ | false | `30` + +| *broadcast* | Whether the Kafka records should be dispatched to multiple consumer + +Type: _boolean_ | false | `false` + +| *auto.offset.reset* | What to do when there is no initial offset in Kafka.Accepted values are earliest, latest and none + +Type: _string_ | false | `latest` + +| *failure-strategy* | Specify the failure strategy to apply when a message produced from a record is acknowledged negatively (nack). Values can be `fail` (default), `ignore`, or `dead-letter-queue` + +Type: _string_ | false | `fail` + +| *commit-strategy* | Specify the commit strategy to apply when a message produced from a record is acknowledged. Values can be `latest`, `ignore` or `throttled`. If `enable.auto.commit` is true then the default is `ignore` otherwise it is `throttled` + +Type: _string_ | false | + +| *throttled.unprocessed-record-max-age.ms* | While using the `throttled` commit-strategy, specify the max age in milliseconds that an unprocessed message can be before the connector is marked as unhealthy. + +Type: _int_ | false | `60000` + +| *dead-letter-queue.topic* | When the `failure-strategy` is set to `dead-letter-queue` indicates on which topic the record is sent. Defaults is `dead-letter-topic-$channel` + +Type: _string_ | false | + +| *dead-letter-queue.key.serializer* | When the `failure-strategy` is set to `dead-letter-queue` indicates the key serializer to use. If not set the serializer associated to the key deserializer is used + +Type: _string_ | false | + +| *dead-letter-queue.value.serializer* | When the `failure-strategy` is set to `dead-letter-queue` indicates the value serializer to use. If not set the serializer associated to the value deserializer is used + +Type: _string_ | false | + +| *partitions* | The number of partitions to be consumed concurrently. The connector creates the specified amount of Kafka consumers. It should match the number of partition of the targeted topic + +Type: _int_ | false | `1` + +| *consumer-rebalance-listener.name* | The name set in `javax.inject.Named` of a bean that implements `io.smallrye.reactive.messaging.kafka.KafkaConsumerRebalanceListener`. If set, this rebalance listener is applied to the consumer. + +Type: _string_ | false | + +| *key-deserialization-failure-handler* | The name set in `javax.inject.Named` of a bean that implements `io.smallrye.reactive.messaging.kafka.DeserializationFailureHandler`. If set, deserialization failure happening when deserializing keys are delegated to this handler which may provide a fallback value. + +Type: _string_ | false | + +| *value-deserialization-failure-handler* | The name set in `javax.inject.Named` of a bean that implements `io.smallrye.reactive.messaging.kafka.DeserializationFailureHandler`. If set, deserialization failure happening when deserializing values are delegated to this handler which may provide a fallback value. + +Type: _string_ | false | + +| *graceful-shutdown* | Whether or not a graceful shutdown should be attempted when the application terminates. + +Type: _boolean_ | false | `true` + +|=== + +=== Outgoing channel configuration (writing to Kafka) + +The following attributes are configured using: + + +[source, properties] +---- +mp.messaging.outgoing.your-channel-name.attribute=value +---- + +Some properties have aliases which can be configured globally: + +[source, properties] +---- +kafka.bootstrap.servers=... 
+---- + +.Outgoing Attributes of the 'smallrye-kafka' connector +[cols="25, 30, 15, 20",options="header"] +|=== +|Attribute (_alias_) | Description | Mandatory | Default + +| *acks* | The number of acknowledgments the producer requires the leader to have received before considering a request complete. This controls the durability of records that are sent. Accepted values are: 0, 1, all + +Type: _string_ | false | `1` + +| *bootstrap.servers* + +_(kafka.bootstrap.servers)_ | A comma-separated list of host:port to use for establishing the initial connection to the Kafka cluster. + +Type: _string_ | false | `localhost:9092` + +| *buffer.memory* | The total bytes of memory the producer can use to buffer records waiting to be sent to the server. + +Type: _long_ | false | `33554432` + +| *close-timeout* | The amount of milliseconds waiting for a graceful shutdown of the Kafka producer + +Type: _int_ | false | `10000` + +| *cloud-events* | Enables (default) or disables the Cloud Event support. If enabled on an _incoming_ channel, the connector analyzes the incoming records and try to create Cloud Event metadata. If enabled on an _outgoing_, the connector sends the outgoing messages as Cloud Event if the message includes Cloud Event Metadata. + +Type: _boolean_ | false | `true` + +| *cloud-events-data-content-type* + +_(cloud-events-default-data-content-type)_ | Configure the default `datacontenttype` attribute of the outgoing Cloud Event. Requires `cloud-events` to be set to `true`. This value is used if the message does not configure the `datacontenttype` attribute itself + +Type: _string_ | false | + +| *cloud-events-data-schema* + +_(cloud-events-default-data-schema)_ | Configure the default `dataschema` attribute of the outgoing Cloud Event. Requires `cloud-events` to be set to `true`. This value is used if the message does not configure the `dataschema` attribute itself + +Type: _string_ | false | + +| *cloud-events-insert-timestamp* + +_(cloud-events-default-timestamp)_ | Whether or not the connector should insert automatically the `time` attribute` into the outgoing Cloud Event. Requires `cloud-events` to be set to `true`. This value is used if the message does not configure the `time` attribute itself + +Type: _boolean_ | false | `true` + +| *cloud-events-mode* | The Cloud Event mode (`structured` or `binary` (default)). Indicates how are written the cloud events in the outgoing record + +Type: _string_ | false | `binary` + +| *cloud-events-source* + +_(cloud-events-default-source)_ | Configure the default `source` attribute of the outgoing Cloud Event. Requires `cloud-events` to be set to `true`. This value is used if the message does not configure the `source` attribute itself + +Type: _string_ | false | + +| *cloud-events-subject* + +_(cloud-events-default-subject)_ | Configure the default `subject` attribute of the outgoing Cloud Event. Requires `cloud-events` to be set to `true`. This value is used if the message does not configure the `subject` attribute itself + +Type: _string_ | false | + +| *cloud-events-type* + +_(cloud-events-default-type)_ | Configure the default `type` attribute of the outgoing Cloud Event. Requires `cloud-events` to be set to `true`. 
This value is used if the message does not configure the `type` attribute itself + +Type: _string_ | false | + +| *health-enabled* | Whether health reporting is enabled (default) or disabled + +Type: _boolean_ | false | `true` + +| *health-readiness-enabled* | Whether readiness health reporting is enabled (default) or disabled + +Type: _boolean_ | false | `true` + +| *health-readiness-timeout* | During the readiness health check, the connector connects to the broker and retrieves the list of topics. This attribute specifies the maximum duration (in ms) for the retrieval. If exceeded, the channel is considered not-ready. + +Type: _long_ | false | `2000` + +| *health-readiness-topic-verification* | Whether the readiness check should verify that topics exist on the broker. Default to false. Enabling it requires an admin connection. + +Type: _boolean_ | false | `false` + +| *key* | A key to used when writing the record + +Type: _string_ | false | + +| *key.serializer* | The serializer classname used to serialize the record's key + +Type: _string_ | false | `org.apache.kafka.common.serialization.StringSerializer` + +| *max-inflight-messages* | The maximum number of messages to be written to Kafka concurrently. It limits the number of messages waiting to be written and acknowledged by the broker. You can set this attribute to `0` remove the limit + +Type: _long_ | false | `1024` + +| *merge* | Whether the connector should allow multiple upstreams + +Type: _boolean_ | false | `false` + +| *partition* | The target partition id. -1 to let the client determine the partition + +Type: _int_ | false | `-1` + +| *retries* | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. + +Type: _long_ | false | `2147483647` + +| *topic* | The consumed / populated Kafka topic. If neither this property nor the `topics` properties are set, the channel name is used + +Type: _string_ | false | + +| *tracing-enabled* | Whether tracing is enabled (default) or disabled + +Type: _boolean_ | false | `true` + +| *value.serializer* | The serializer classname used to serialize the payload + +Type: _string_ | true | + +| *waitForWriteCompletion* | Whether the client waits for Kafka to acknowledge the written record before acknowledging the message + +Type: _boolean_ | false | `true` + +|=== + == Going further This guide has shown how you can interact with Kafka using Quarkus. diff --git a/docs/src/main/asciidoc/kogito.adoc b/docs/src/main/asciidoc/kogito.adoc index 483f8b89af4a7..0b41facf0cef2 100644 --- a/docs/src/main/asciidoc/kogito.adoc +++ b/docs/src/main/asciidoc/kogito.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Kogito to add business automation capabilities to an application @@ -80,12 +80,13 @@ The solution is located in the `kogito-quickstart` {quickstarts-tree-url}/kogito First, we need a new project. 
Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=kogito-quickstart \ - -Dextensions="kogito" + -Dextensions="kogito" \ + -DnoExamples cd kogito-quickstart ---- @@ -242,7 +243,7 @@ Then run it: [source,bash] ---- -java -jar ./target/kogito-quickstart-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- === Running in Native Mode diff --git a/docs/src/main/asciidoc/kotlin.adoc b/docs/src/main/asciidoc/kotlin.adoc index 2918d0eb8f735..ba88483030e31 100644 --- a/docs/src/main/asciidoc/kotlin.adoc +++ b/docs/src/main/asciidoc/kotlin.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Kotlin @@ -29,20 +29,20 @@ NB: For Gradle project setup please see below, and for further reference consult First, we need a new Kotlin project. This can be done using the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=rest-kotlin-quickstart \ -DclassName="org.acme.rest.GreetingResource" \ -Dpath="/greeting" \ - -Dextensions="kotlin,resteasy-jsonb" + -Dextensions="resteasy,kotlin,resteasy-jackson" cd rest-kotlin-quickstart ---- When adding `kotlin` to the extensions list, the Maven plugin will generate a project that is properly configured to work with Kotlin. Furthermore the `org.acme.rest.GreetingResource` class is implemented as Kotlin source code (as is the case with the generated tests). -The addition of `resteasy-jsonb` in the extension list results in importing the RESTEasy/JAX-RS and JSON-B extensions. +The addition of `resteasy-jackson` in the extension list results in importing the RESTEasy/JAX-RS and Jackson extensions. `GreetingResource.kt` looks like this: @@ -81,14 +81,12 @@ We also update the `GreetingResource.kt` like so: ---- import javax.ws.rs.GET import javax.ws.rs.Path -import javax.ws.rs.Produces import javax.ws.rs.core.MediaType @Path("/greeting") class GreetingResource { @GET - @Produces(MediaType.APPLICATION_JSON) fun hello() = Greeting("hello") } ---- @@ -192,123 +190,199 @@ Similar to the Maven configuration, when using Gradle, the following modificatio ---- plugins { id 'java' - id 'io.quarkus' version '{quarkus-version}' // <1> + id 'io.quarkus' - id "org.jetbrains.kotlin.jvm" version "{kotlin-version}" // <2> - id "org.jetbrains.kotlin.plugin.allopen" version "{kotlin-version}" // <2> + id "org.jetbrains.kotlin.jvm" version "{kotlin-version}" // <1> + id "org.jetbrains.kotlin.plugin.allopen" version "{kotlin-version}" // <1> } repositories { + mavenLocal() mavenCentral() } -group = '...' 
// set your group -version = '1.0.0-SNAPSHOT' - dependencies { implementation 'org.jetbrains.kotlin:kotlin-stdlib-jdk8:{kotlin-version}' - // <3> - implementation enforcedPlatform('io.quarkus:quarkus-bom:{quarkus-version}') + implementation enforcedPlatform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}") + implementation 'io.quarkus:quarkus-resteasy' - implementation 'io.quarkus:quarkus-resteasy-jsonb' + implementation 'io.quarkus:quarkus-resteasy-jackson' implementation 'io.quarkus:quarkus-kotlin' testImplementation 'io.quarkus:quarkus-junit5' testImplementation 'io.rest-assured:rest-assured' } -sourceCompatibility = '1.8' -targetCompatibility = '1.8' - -test { - useJUnitPlatform() - exclude '**/Native*' -} +group = '...' // set your group +version = '1.0.0-SNAPSHOT' -buildNative { - enableHttpUrlHandler = true +java { + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } -allOpen { // <4> +allOpen { // <2> annotation("javax.ws.rs.Path") annotation("javax.enterprise.context.ApplicationScoped") annotation("io.quarkus.test.junit.QuarkusTest") } + +compileKotlin { + kotlinOptions.jvmTarget = JavaVersion.VERSION_11 + kotlinOptions.javaParameters = true +} + +compileTestKotlin { + kotlinOptions.jvmTarget = JavaVersion.VERSION_11 +} ---- -<1> The Quarkus plugin needs to be applied. -<2> The Kotlin plugin version needs to be specified. -<3> We include the Quarkus BOM using Gradle's link:https://docs.gradle.org/5.4.1/userguide/managing_transitive_dependencies.html#sec:bom_import[relevant syntax] -<4> The all-open configuration required, as per Maven guide above +<1> The Kotlin plugin version needs to be specified. +<2> The all-open configuration required, as per Maven guide above or, if you use the Gradle Kotlin DSL: [source,kotlin,subs=attributes+] ---- -import io.quarkus.gradle.tasks.QuarkusNative -import org.jetbrains.kotlin.gradle.tasks.KotlinCompile - -group = "..." -version = "1.0.0-SNAPSHOT" plugins { - java - id("io.quarkus") version "{quarkus-version}" // <1> + kotlin("jvm") version "{kotlin-version}" // <1> + kotlin("plugin.allopen") version "{kotlin-version}" + id("io.quarkus") +} - kotlin("jvm") version "{kotlin-version}" // <2> - id("org.jetbrains.kotlin.plugin.allopen") version "{kotlin-version}" // <2> +repositories { + mavenLocal() + mavenCentral() } +val quarkusPlatformGroupId: String by project +val quarkusPlatformArtifactId: String by project +val quarkusPlatformVersion: String by project + +group = "..." +version = "1.0.0-SNAPSHOT" + + repositories { + mavenLocal() mavenCentral() } dependencies { implementation(kotlin("stdlib-jdk8")) - // <3> - implementation(enforcedPlatform("io.quarkus:quarkus-bom:${quarkusVersion}")) + implementation(enforcedPlatform("${quarkusPlatformGroupId}:${quarkusPlatformArtifactId}:${quarkusPlatformVersion}")) + implementation("io.quarkus:quarkus-kotlin") implementation("io.quarkus:quarkus-resteasy") - implementation("io.quarkus:quarkus-resteasy-jsonb") + implementation("io.quarkus:quarkus-resteasy-jackson") testImplementation("io.quarkus:quarkus-junit5") testImplementation("io.rest-assured:rest-assured") } -tasks { - named("buildNative") { - setEnableHttpUrlHandler(true) - } +group = '...' 
// set your group +version = "1.0.0-SNAPSHOT" + +java { + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } -allOpen { // <4> +allOpen { // <2> annotation("javax.ws.rs.Path") annotation("javax.enterprise.context.ApplicationScoped") annotation("io.quarkus.test.junit.QuarkusTest") } -java { - sourceCompatibility = JavaVersion.VERSION_1_8 - targetCompatibility = JavaVersion.VERSION_1_8 +tasks.withType { + kotlinOptions.jvmTarget = JavaVersion.VERSION_11.toString() + kotlinOptions.javaParameters = true } -val compileKotlin: KotlinCompile by tasks -compileKotlin.kotlinOptions { - jvmTarget = "1.8" -} +---- -val compileTestKotlin: KotlinCompile by tasks -compileTestKotlin.kotlinOptions { - jvmTarget = "1.8" -} +<1> The Kotlin plugin version needs to be specified. +<2> The all-open configuration required, as per Maven guide above + + + +== Live reload + +Quarkus provides support for live reloading changes made to source code. This support is also available to Kotlin, meaning that developers can update their Kotlin source +code and immediately see their changes reflected. + +To see this feature in action, first execute: `./mvnw compile quarkus:dev` + +When executing an HTTP GET request against `http://localhost:8080/greeting`, you see a JSON message with the value `hello` as its `message` field. + +Now using your favorite editor or IDE, update `GreetingResource.kt` and change the `hello` method to the following: + +[source,kotlin] +---- +fun hello() = Greeting("hi") ---- -<1> The Quarkus plugin needs to be applied. -<2> The Kotlin plugin version needs to be specified. -<3> We include the Quarkus BOM using Gradle's link:https://docs.gradle.org/5.4.1/userguide/managing_transitive_dependencies.html#sec:bom_import[relevant syntax] -<4> The all-open configuration required, as per Maven guide above +When you now execute an HTTP GET request against `http://localhost:8080/greeting`, you should see a JSON message with the value `hi` as its `message` field. + +One thing to note is that the live reload feature is not available when making changes to both Java and Kotlin source that have dependencies on each other. We hope to alleviate this limitation in the future. + +== Packaging the application + +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also build the native executable using `./mvnw package -Pnative`, or `./gradlew buildNative`. + +== Kotlin and Jackson + +If the `com.fasterxml.jackson.module:jackson-module-kotlin` dependency and the `quarkus-jackson` extension (or the `quarkus-resteasy-jackson` extension) have been added to the project, +then Quarkus automatically registers the `KotlinModule` to the `ObjectMapper` bean (see link:rest-json#jackson[this] guide for more details). + +When using Kotlin data classes with `native-image` you may experience serialization errors that do not occur with the `JVM` version, despite the Kotlin Jackson Module being registered. This is especially so if you have a more complex JSON hierarchy, where an issue on a lower node causes a serialization failure. The error message displayed is a catch-all and typically displays an issue with the root object, which may not necessarily be the case. 
+ +[source] +---- +com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of `Address` (no Creators, like default construct, exist): cannot deserialize from Object value (no delegate- or property-based Creator) +---- + +To ensure full compatibility with `native-image`, it is recommended to apply the Jackson `@field:JsonProperty("fieldName")` annotation, and set a nullable default, as illustrated below. You can automate the generation of Kotlin data classes for your sample JSON using IntelliJ plugins (such as JSON to Kotlin Class), and easily enable the Jackson annotation and select nullable parameters as part of the auto-code generation. + +[source,kotlin] +---- +import com.fasterxml.jackson.annotation.JsonProperty + +data class Response( + @field:JsonProperty("chart") + val chart: ChartData? = null +) + +data class ChartData( + @field:JsonProperty("result") + val result: List<ResultItem>? = null, + + @field:JsonProperty("error") + val error: Any? = null +) + +data class ResultItem( + @field:JsonProperty("meta") + val meta: Meta? = null, + + @field:JsonProperty("indicators") + val indicators: IndicatorItems? = null, + + @field:JsonProperty("timestamp") + val timestamp: List<Long>? = null +) + +... +---- + + +== Kotlin coroutines and Mutiny + +Kotlin coroutines provide an imperative programming model that actually gets executed in an asynchronous, reactive manner. +To simplify the interoperation between Mutiny and Kotlin there is the module `io.smallrye.reactive:mutiny-kotlin`, described link:https://smallrye.io/smallrye-mutiny/guides/kotlin[here]. == CDI @Inject with Kotlin @@ -362,32 +436,3 @@ class GreetingResource { <1> Kotlin requires a @field: xxx qualifier as it has no @Target on the annotation definition. Add @field: xxx in this example. @Default is used as the qualifier, explicitly specifying the use of the default bean. - -== Live reload - -Quarkus provides support for live reloading changes made to source code. This support is also available to Kotlin, meaning that developers can update their Kotlin source -code and immediately see their changes reflected. - -To see this feature in action, first execute: `./mvnw compile quarkus:dev` - -When executing an HTTP GET request against `http://localhost:8080/greeting`, you see a JSON message with the value `hello` as its `message` field. - -Now using your favorite editor or IDE, update `GreetingResource.kt` and change the `hello` method to the following: - -[source,kotlin] ----- -fun hello() = Greeting("hi") ----- - -When you now execute an HTTP GET request against `http://localhost:8080/greeting`, you should see a JSON message with the value `hi` as its `message` field. - -One thing to note is that the live reload feature is not available when making changes to both Java and Kotlin source that have dependencies on each other. We hope to alleviate this limitation in the future. - -== Packaging the application - -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. You can also build the native executable using `./mvnw package -Pnative`, or `./gradlew buildNative`. - -== Kotlin and Jackson - -If the `com.fasterxml.jackson.module:jackson-module-kotlin` dependency and the `quarkus-jackson` extension (or the `quarkus-resteasy-extension`) have been added to project, -then Quarkus automatically registers the `KotlinModule` to the `ObjectMapper` bean (see link:rest-json#jackson[this] guide for more details).
\ No newline at end of file diff --git a/docs/src/main/asciidoc/kubernetes-client.adoc b/docs/src/main/asciidoc/kubernetes-client.adoc index 3a86f2b54a665..5f0b86e57c978 100644 --- a/docs/src/main/asciidoc/kubernetes-client.adoc +++ b/docs/src/main/asciidoc/kubernetes-client.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Kubernetes Client @@ -21,7 +21,7 @@ Being able to write such operators in Java with the very low footprint that nati Once you have your Quarkus project configured you can add the `kubernetes-client` extension to your project by running the following command in your project base directory. -[source] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="kubernetes-client" ---- @@ -41,13 +41,13 @@ This will add the following to your `pom.xml`: Quarkus configures a Bean of type `KubernetesClient` which can be injected into application code using the well known CDI methods. This client can be configured using various properties as can be seen in the following example: -[source] +[source,properties] ---- quarkus.kubernetes-client.trust-certs=false quarkus.kubernetes-client.namespace=default ---- -Note that the full list of properties is available in the https://github.com/quarkusio/quarkus/blob/master/extensions/kubernetes-client/runtime-internal/src/main/java/io/quarkus/kubernetes/client/runtime/KubernetesClientBuildConfig.java[KubernetesClientBuildConfig] class. +Note that the full list of properties is available in the https://github.com/quarkusio/quarkus/blob/main/extensions/kubernetes-client/runtime-internal/src/main/java/io/quarkus/kubernetes/client/runtime/KubernetesClientBuildConfig.java[KubernetesClientBuildConfig] class. === Overriding @@ -58,7 +58,7 @@ An example of this can be seen in the following snippet: [source,java] ---- -@ApplicationScoped +@Singleton public class KubernetesClientProducer { @Produces @@ -71,9 +71,9 @@ public class KubernetesClientProducer { == Testing -To make testing against a mock Kubernetes API extremely simple, Quarkus provides the `KubernetesMockServerTestResource` which automatically launches +To make testing against a mock Kubernetes API extremely simple, Quarkus provides the `WithKubernetesTestServer` annotation which automatically launches a mock of the Kubernetes API server and sets the proper environment variables needed so that the Kubernetes Client configures itself to use said mock. -Tests can inject the mock and set it up in any way necessary for the particular testing using the `@MockServer` annotation. +Tests can inject the mock server and set it up in any way necessary for the particular testing using the `@KubernetesTestServer` annotation. 
Let's assume we have a REST endpoint defined like so: @@ -89,7 +89,6 @@ public class Pods { } @GET - @Produces(MediaType.APPLICATION_JSON) @Path("/{namespace}") public List pods(@PathParam("namespace") String namespace) { return kubernetesClient.pods().inNamespace(namespace).list().getItems(); @@ -101,12 +100,13 @@ We could write a test for this endpoint very easily like so: [source%nowrap,java] ---- -@QuarkusTestResource(KubernetesMockServerTestResource.class) +// you can even configure aspects like crud, https and port on this annotation +@WithKubernetesTestServer @QuarkusTest public class KubernetesClientTest { - @MockServer - KubernetesMockServer mockServer; + @KubernetesTestServer + KubernetesServer mockServer; @BeforeEach public void before() { @@ -140,9 +140,40 @@ Note that to take advantage of these features, the `quarkus-test-kubernetes-clie ---- +Alternately, you can create a `CustomKubernetesMockServerTestResource.java` to ensure all your `@QuarkusTest` enabled test classes share the same mock server setup: + +[source%nowrap,java] +---- +public class CustomKubernetesMockServerTestResource extends KubernetesServerTestResource { + + @Override + public void configureMockServer(KubernetesServer mockServer) { + mockServer.expect().get().withPath("/api/v1/namespaces/test/pods") + .andReturn(200, new PodList()) + .always(); + } +} +---- + +and use this in your other test classes as follows: +[source%nowrap,java] +---- +@QuarkusTestResource(KubernetesMockServerTestResource.class) +@QuarkusTest +public class KubernetesClientTest { + + //tests will now use the configured server... +} +---- + +Furthermore, to get a mock server that replies with empty lists by default (instead of getting 404 responses from the Kubernetes API), +you can use the `EmptyDefaultKubernetesMockServerTestResource.class` instead of +`KubernetesMockServerTestResource.class`. + + == Note on implementing the Watcher interface -Due to the restrictions imposed by GraalVM, extra care needs to be taken when implementing a `io.fabric8.kubernetes.client.Watcher` if the application is intended to work in native mode. +Due to the restrictions imposed by GraalVM, extra care needs to be taken when implementing an `io.fabric8.kubernetes.client.Watcher` if the application is intended to work in native mode. Essentially every `Watcher` implementation needs to specify the Kubernetes model class that it handles via the ``Watcher``'s generic type at class definition time. To better understand this, suppose we want to watch for changes to Kubernetes `Pod` resources. There are a couple ways to write such a `Watcher` that are guaranteed to work in native: @@ -264,6 +295,71 @@ subjects: Replace `` and `` with your values. Have a look at https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/[Configure Service Accounts for Pods] to get further information. +== OpenShift Client + +If the targeted Kubernetes cluster is an OpenShift cluster, it is possible to access it through +the `openshift-client` extension, in a similar way. This leverages the dedicated fabric8 +openshift client, and provides access to `OpenShift` proprietary objects (e.g. `Route`, `ProjectRequest`, `BuildConfig` ...) + +Note that the configuration properties are shared with the `kubernetes-client` extension. In +particular they have the same `quarkus.kubernetes-client` prefix. 
+ +Add the extension with: + +[source] +---- +./mvnw quarkus:add-extension -Dextensions="openshift-client" +---- + +Note that `openshift-client` extension has a dependency on the `kubernetes-client` extension. + +To use the client, inject an `OpenShiftClient` instead of the `KubernetesClient`: + +[source, java] +---- +@Inject +private OpenShiftClient openshiftClient; +---- + +If you need to override the default `OpenShiftClient`, provide a producer such as: + +[source, java] +---- +@Singleton +public class OpenShiftClientProducer { + + @Produces + public OpenShiftClient openshiftClient() { + // here you would create a custom client + return new DefaultOpenShiftClient(); + } +} +---- + +Mock support is also provided in a similar fashion: + +[source, java] +---- +@QuarkusTestResource(OpenShiftMockServerTestResource.class) +@QuarkusTest +public class OpenShiftClientTest { + + @MockServer + private OpenShiftMockServer mockServer; +... +---- + +To use this feature, you have to add a dependency on `quarkus-test-openshift-client`: + +[source,xml] +---- + + io.quarkus + quarkus-test-openshift-client + test + +---- + == Configuration Reference include::{generated-dir}/config/quarkus-kubernetes-client.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/kubernetes-config.adoc b/docs/src/main/asciidoc/kubernetes-config.adoc index 4eeb8949a102e..a8a8209bfcb84 100644 --- a/docs/src/main/asciidoc/kubernetes-config.adoc +++ b/docs/src/main/asciidoc/kubernetes-config.adoc @@ -1,14 +1,14 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Kubernetes Config include::./attributes.adoc[] -Quarkus includes the `kubernetes-config` extension which allows developers to use Kubernetes https://cloud.google.com/kubernetes-engine/docs/concepts/configmap[ConfigMaps] as a configuration source, without having to mount the ConfigMaps into the https://kubernetes.io/docs/concepts/workloads/pods/pod/[Pod] running the Quarkus application. +Quarkus includes the `kubernetes-config` extension which allows developers to use Kubernetes https://cloud.google.com/kubernetes-engine/docs/concepts/configmap[ConfigMaps] and https://cloud.google.com/kubernetes-engine/docs/concepts/secret[Secrets] as a configuration source, without having to mount them into the https://kubernetes.io/docs/concepts/workloads/pods/pod/[Pod] running the Quarkus application. == Configuration @@ -16,7 +16,7 @@ Quarkus includes the `kubernetes-config` extension which allows developers to us Once you have your Quarkus project configured you can add the `kubernetes-config` extension by running the following command in your project base directory. -[source] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="kubernetes-config" ---- @@ -33,19 +33,27 @@ This will add the following to your `pom.xml`: == Usage -The extension works by reading ConfigMaps directly from the Kubernetes API server using the link:kubernetes-client[Kubernetes Client]. +The extension works by reading ConfigMaps and Secrets directly from the Kubernetes API server using the link:kubernetes-client[Kubernetes Client]. 
-The extension understands the following types of ConfigMaps as input sources: +The extension understands the following types of ConfigMaps and Secrets as input sources: -* ConfigMaps that contain literal data (see https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#create-configmaps-from-literal-values[this] for an example on how to create one) -* ConfigMaps created from files named, `application.properties`, `application.yaml` or `application.yml` (see https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#create-configmaps-from-files[this] for an example on how to create one). +* ConfigMaps and Secrets that contain literal data (see https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#create-configmaps-from-literal-values[this] for an example on how to create one) +* ConfigMaps and Secrets created from files named `application.properties`, `application.yaml` or `application.yml` (see https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#create-configmaps-from-files[this] for an example on how to create one). -To configure which ConfigMaps (from the namespace that the application runs in, or the explicitly configured namespace via `quarkus.kubernetes-client.namespace`), you can set the `quarkus.kubernetes-config.config-maps` property (in any of the usual Quarkus ways). If access to ConfigMaps from a specific namespace you can set the -`quarkus.kubernetes-config.namespace` property. Keep in mind however that you will also have to explicitly enable the retrieval of ConfigMaps by setting `quarkus.kubernetes-config.enabled=true` (the default is `false` in order to make it easy to test the application locally). +You have to explicitly enable the retrieval of ConfigMaps and Secrets by setting `quarkus.kubernetes-config.enabled=true`. +The default is `false` in order to make it easy to test the application locally. + +Afterwards, set the `quarkus.kubernetes-config.config-maps` property to configure which ConfigMaps should be used. +Set the `quarkus.kubernetes-config.secrets` property to configure which Secrets should be used. +To access ConfigMaps and Secrets from a specific namespace, you can set the `quarkus.kubernetes-config.namespace` property. === Priority of obtained properties -The properties obtained from the ConfigMaps have a higher priority (i.e. they override) any properties of the same name that are found in `application.properties` (or the YAML equivalents), but they have lower priority than properties set via Environment Variables or Java System Properties. +The properties obtained from the ConfigMaps and Secrets have a higher priority than (i.e. they override) any properties of the same name that are found in `application.properties` (or the YAML equivalents), but they have lower priority than properties set via Environment Variables or Java System Properties. + +Furthermore, when multiple ConfigMaps (or Secrets) are used, ConfigMaps (or Secrets) defined later in the list have a higher priority than ConfigMaps defined earlier in the list. + +Finally, when both ConfigMaps and Secrets are used, the latter always have a higher priority than the former.
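For illustration, a property supplied by one of the configured ConfigMaps or Secrets is consumed like any other configuration value. The following is a minimal sketch assuming a hypothetical `greeting.message` entry provided by a ConfigMap listed in `quarkus.kubernetes-config.config-maps` (the names are illustrative only):

[source,java]
----
package org.acme.config;

import javax.enterprise.context.ApplicationScoped;

import org.eclipse.microprofile.config.inject.ConfigProperty;

@ApplicationScoped
public class GreetingService {

    // Resolved from the ConfigMap (or a Secret) at startup; environment variables
    // and system properties still take precedence, per the priority rules above.
    @ConfigProperty(name = "greeting.message", defaultValue = "hello")
    String message;

    public String greet() {
        return message;
    }
}
----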
=== Kubernetes Permissions @@ -54,6 +62,11 @@ that is used to run the application needs to have the proper permissions for suc Thankfully, when using the `kubernetes-config` extension along with the link:deploying-to-kubernetes[Kubernetes] extension, all the necessary Kubernetes resources to make that happen are automatically generated. +==== Secrets + +By default, the link:deploying-to-kubernetes[Kubernetes] extension doesn't generate the necessary resources to allow accessing secrets. +Set `quarkus.kubernetes-config.secrets.enabled=true` to generate the necessary role and corresponding role binding. + == Configuration Reference include::{generated-dir}/config/quarkus-kubernetes-config-kubernetes-config-source-config.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/lifecycle.adoc b/docs/src/main/asciidoc/lifecycle.adoc index b95d7debc9586..a97a99fe594d1 100644 --- a/docs/src/main/asciidoc/lifecycle.adoc +++ b/docs/src/main/asciidoc/lifecycle.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Application Initialization and Termination @@ -37,7 +37,7 @@ The solution is located in the `lifecycle-quickstart` {quickstarts-tree-url}/lif First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ @@ -58,7 +58,7 @@ It generates: == The main method -By default Quarkus will automatically generate a main method, that will boostrap Quarkus and then just wait for +By default Quarkus will automatically generate a main method, that will bootstrap Quarkus and then just wait for shutdown to be initiated. Let's provide our own main method: [source,java] ---- @@ -214,7 +214,7 @@ TIP: If a class is annotated with `@Startup` but with no scope annotation then ` Run the application with: `./mvnw compile quarkus:dev`, the logged message is printed. When the application is stopped, the second log message is printed. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable using `./mvnw clean package -Pnative`. 
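For reference, the startup and shutdown log messages mentioned above come from observing the `StartupEvent` and `ShutdownEvent` CDI events; a minimal sketch of such a listener (class name and message text are illustrative):

[source,java]
----
package org.acme.lifecycle;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;

import org.jboss.logging.Logger;

import io.quarkus.runtime.ShutdownEvent;
import io.quarkus.runtime.StartupEvent;

@ApplicationScoped
public class AppLifecycleBean {

    private static final Logger LOGGER = Logger.getLogger(AppLifecycleBean.class);

    // Invoked once the application has started
    void onStart(@Observes StartupEvent ev) {
        LOGGER.info("The application is starting...");
    }

    // Invoked when the application is shutting down
    void onStop(@Observes ShutdownEvent ev) {
        LOGGER.info("The application is stopping...");
    }
}
----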
== Launch Modes diff --git a/docs/src/main/asciidoc/liquibase.adoc b/docs/src/main/asciidoc/liquibase.adoc index 2a6d8fbc50216..0bb4c2dfa2fdc 100644 --- a/docs/src/main/asciidoc/liquibase.adoc +++ b/docs/src/main/asciidoc/liquibase.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Liquibase @@ -134,19 +134,16 @@ The Liquibase properties are prefixed exactly the same way as the named datasour quarkus.datasource.db-kind=h2 quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 quarkus.datasource.jdbc.max-size=13 quarkus.datasource.users.db-kind=h2 quarkus.datasource.users.username=username1 quarkus.datasource.users.jdbc.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.jdbc.min-size=1 quarkus.datasource.users.jdbc.max-size=11 quarkus.datasource.inventory.db-kind=h2 quarkus.datasource.inventory.username=username2 quarkus.datasource.inventory.jdbc.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.jdbc.min-size=2 quarkus.datasource.inventory.jdbc.max-size=12 # Liquibase configuration for the default datasource diff --git a/docs/src/main/asciidoc/logging-sentry.adoc b/docs/src/main/asciidoc/logging-sentry.adoc index 5e1ba7fc87846..67e259d5b3c68 100644 --- a/docs/src/main/asciidoc/logging-sentry.adoc +++ b/docs/src/main/asciidoc/logging-sentry.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Logging to Sentry diff --git a/docs/src/main/asciidoc/logging.adoc b/docs/src/main/asciidoc/logging.adoc index 32a32cec44380..abef279077cb3 100644 --- a/docs/src/main/asciidoc/logging.adoc +++ b/docs/src/main/asciidoc/logging.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Configuring Logging @@ -51,6 +51,32 @@ public class ExampleResource { NOTE: If you use JBoss Logging but one of your libraries uses a different logging API, you may need to configure a link:#logging-adapters[Logging Adapter]. +You can also inject a configured `org.jboss.logging.Logger` instance in your beans and resource classes. + +[source, java] +---- +import org.jboss.logging.Logger; + +@ApplicationScoped +class SimpleBean { + + @Inject + Logger log; <1> + + @LoggerName("foo") + Logger fooLog; <2> + + public void ping() { + log.info("Simple!"); + fooLog.info("Goes to _foo_ logger!"); + } +} +---- +<1> The FQCN of the declaring class is used as a logger name, i.e. `org.jboss.logging.Logger.getLogger(SimpleBean.class)` will be used. +<2> In this case, the name _foo_ is used as a logger name, i.e. `org.jboss.logging.Logger.getLogger("foo")` will be used. + +NOTE: The logger instances are cached internally. Therefore, a logger injected e.g. into a `@RequestScoped` bean is shared for all bean instances to avoid possible performance penalty associated with logger instantiation. + === What about Apache Log4j ? 
link:https://logging.apache.org/log4j/2.x/[Log4j] is a logging implementation: it contains a logging backend and a logging facade. @@ -105,10 +131,19 @@ quarkus.log.level=INFO quarkus.log.category."org.hibernate".level=DEBUG ---- +Setting a log level below `DEBUG` requires the minimum log level to be adjusted, +either globally via the `quarkus.log.min-level` property or per-category as shown in the example above, +as well as adjusting the log level itself. + +The minimum log level sets a floor for the log levels that Quarkus may ever need to generate, +opening the door to optimization opportunities. +As an example, in native execution the minimum level enables lower level checks (e.g. `isTraceEnabled`) to be folded to `false`, +resulting in dead code elimination for code that will never be executed. + All possible properties are listed in <>. NOTE: If you are adding these properties via command line make sure `"` is escaped. -For example `-Dquarkus.log.category.\"org.hibernate\".level=DEBUG`. +For example `-Dquarkus.log.category.\"org.hibernate\".level=TRACE`. === Logging categories @@ -122,6 +157,7 @@ These can also be overridden by attaching a one or more named handlers to a cate |=== |Property Name|Default|Description |quarkus.log.category."".level|INFO footnote:[Some extensions may define customized default log levels for certain categories, in order to reduce log noise by default. Setting the log level in configuration will override any extension-defined log levels.]|The level to use to configure the category named ``. The quotes are necessary. +|quarkus.log.category."".min-level|DEBUG |The minimum logging level to use to configure the category named ``. The quotes are necessary. |quarkus.log.category."".use-parent-handlers|true|Specify whether or not this logger should send its output to its parent logger. |quarkus.log.category."".handlers=[]|empty footnote:[By default the configured category gets the same handlers attached as the one on the root logger.]|The names of the handlers that you want to attach to a specific category.
|=== @@ -136,7 +172,8 @@ The root logger category is handled separately, and is configured via the follow [cols=" ---- -JDK Logging: - -[source, xml] ----- - - org.jboss.logging - jboss-logging-jdk - ----- - Log4j: [source, xml] ---- - org.jboss.logging - jboss-logging-log4j + org.jboss.logmanager + log4j-jboss-logmanager ---- @@ -398,7 +426,7 @@ Log4j2: [source, xml] ---- - org.jboss.logging + org.jboss.logmanager log4j2-jboss-logmanager ---- @@ -409,7 +437,7 @@ And Slf4j: ---- org.jboss.slf4j - slf4j-jboss-logging + slf4j-jboss-logmanager ---- diff --git a/docs/src/main/asciidoc/mailer.adoc b/docs/src/main/asciidoc/mailer.adoc index 10a134dbd1fc0..1c692d72cf093 100644 --- a/docs/src/main/asciidoc/mailer.adoc +++ b/docs/src/main/asciidoc/mailer.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Sending emails @@ -29,18 +29,20 @@ It covers simple emails, attachments, inlined attachments, the reactive and impe Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=sending-email-quickstart \ - -Dextensions="mailer" + -DclassName="org.acme.mailer.SimpleMailerResource" \ + -Dpath="/simple" \ + -Dextensions="resteasy,mailer" cd sending-email-quickstart ---- If you already have an existing project, add the `mailer` extension: -[source] +[source,bash] ---- ./mvnw quarkus:add-extensions -Dextensions="mailer" ---- @@ -62,7 +64,7 @@ Note that the password can also be configured using system properties and enviro Here is an example using _sendgrid_: -[source] +[source,properties] ---- quarkus.mailer.from=test@quarkus.io quarkus.mailer.host=smtp.sendgrid.net @@ -79,14 +81,16 @@ It is recommended to encrypt any sensitive data, such as the `quarkus.mailer.pas One approach is to save the value into a secure store like HashiCorp Vault, and refer to it from the configuration. Assuming for instance that Vault contains key `mail-password` at path `myapps/myapp/myconfig`, then the mailer extension can be simply configured as: -``` + +[source,properties] +---- ... # path within the kv secret engine where is located the application sensitive configuration quarkus.vault.secret-config-kv-path=myapps/myapp/myconfig ... quarkus.mailer.password=${mail-password} -``` +---- Please note that the password value is evaluated only once, at startup time. If `mail-password` was changed in Vault, the only way to get the new value would be to restart the application. ==== @@ -287,6 +291,27 @@ public CompletionStage send() { TIP: Injected mail templates are validated during build. If there is no matching template in `src/main/resources/templates` the build fails. +Adding `quarkus-resteasy-reactive` extension allows you to directly return a `Uni`, such as: + +[source, java] +---- +@Inject +MailTemplate hello; + +@GET +@Path("/mail") +public Uni send() { <1> + return hello.to("to@acme.org") + .subject("Hello from Qute template") + // the template looks like: Hello {name}! 
+ .data("name", "John") + .send() + .onItem().transform(x -> Response.accepted().build()); <2> +} +---- +<1> The method returns a `Uni` +<2> Transform the `Uni` returned by `MailTemplate` into `Unit` + == Testing email sending Because it is very inconvenient to send emails during development and testing, you can set the `quarkus.mailer.mock` boolean @@ -340,7 +365,7 @@ When done, you can configure your Quarkus application by adding the following pr With TLS: -[source] +[source,properties] ---- quarkus.mailer.auth-methods=DIGEST-MD5 CRAM-SHA256 CRAM-SHA1 CRAM-MD5 PLAIN LOGIN quarkus.mailer.from=YOUREMAIL@gmail.com @@ -353,7 +378,7 @@ quarkus.mailer.password=YOURGENERATEDAPPLICATIONPASSWORD Or with SSL: -[source] +[source,properties] ---- quarkus.mailer.auth-methods=DIGEST-MD5 CRAM-SHA256 CRAM-SHA1 CRAM-MD5 PLAIN LOGIN quarkus.mailer.from=YOUREMAIL@gmail.com @@ -388,8 +413,6 @@ If you need fine control on how the mail is sent, for instance if you need to re Three API flavors are exposed: * the Mutiny client (`io.vertx.mutiny.ext.mail.MailClient`) -* the Axle client (`io.vertx.axle.ext.mail.MailClient`), using `CompletionStage` and Reactive Streams `Publisher` - deprecated, it is recommended to switch to the Mutiny client -* the RX Java 2 client (`io.vertx.reactivex.ext.mail.MailClient`) - deprecated, it is recommended to switch to the Mutiny client * the bare client (`io.vertx.ext.mail.MailClient`) Check the link:vertx[Using Vert.x guide] for further details about these different APIs and how to select the most suitable for you. diff --git a/docs/src/main/asciidoc/maven-tooling.adoc b/docs/src/main/asciidoc/maven-tooling.adoc index c63409ff1735f..0cfb5cf4abf0f 100644 --- a/docs/src/main/asciidoc/maven-tooling.adoc +++ b/docs/src/main/asciidoc/maven-tooling.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Building applications with Maven @@ -12,7 +12,7 @@ include::./attributes.adoc[] With Maven, you can scaffold a new project with: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=my-groupId \ @@ -41,7 +41,7 @@ The following table lists the attributes you can pass to the `create` command: | The artifact id of the created project. Not passing it triggers the interactive mode. | `projectVersion` -| `1.0-SNAPSHOT` +| `1.0.0-SNAPSHOT` | The version of the created project | `platformGroupId` @@ -85,14 +85,14 @@ Instructions to build the image and run the container are written in those Docke From inside a Quarkus project, you can obtain a list of the available extensions with: -[source,shell] +[source,bash] ---- ./mvnw quarkus:list-extensions ---- You can enable an extension using: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="hibernate-validator" ---- @@ -120,7 +120,7 @@ $ ./mvnw quarkus:add-extensions -Dextensions=jdbc,agroal,non-exist-ent You can install all extensions which match a globbing pattern : -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="hibernate-*" ---- @@ -130,7 +130,7 @@ You can install all extensions which match a globbing pattern : Quarkus comes with a built-in development mode. 
Run your application with: -[source,shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -149,16 +149,26 @@ with compilation or deployment an error page will let you know. Hit `CTRL+C` to stop the application. +[NOTE] +==== +By default, `quarkus:dev` sets the debug host to `localhost` (for security reasons). If you need to change this, for example to enable debugging on all hosts, you can use the `-DdebugHost` option like so: + +[source,bash] +---- +./mvnw compile quarkus:dev -DdebugHost=0.0.0.0 +---- +==== + === Remote Development Mode -It is possible to use development mode remotely, so that you can run Quarkus in a container environment (such as Openshift) +It is possible to use development mode remotely, so that you can run Quarkus in a container environment (such as OpenShift) and have changes made to your local files become immediately visible. This allows you to develop in the same environment you will actually run your app in, and with access to the same services. WARNING: Do not use this in production. This should only be used in a development environment. You should not run production application in dev mode. -To do this you must build a mutable application, using the `mutable-jar` format. Set the following properties in `application.xml`: +To do this you must build a mutable application, using the `mutable-jar` format. Set the following properties in `application.properties`: [source,properties] ---- @@ -173,10 +183,18 @@ however they can also be started in dev mode. <3> The URL that your app is going to be running in dev mode at. This is only needed on the local side, so you may want to leave it out of the properties file and specify it as a system property on the command line. +The `mutable-jar` is then built in the same way that a regular Quarkus jar is built, i.e. by issuing: + +[source,bash] +---- +./mvnw package +---- + Before you start Quarkus on the remote host set the environment variable `QUARKUS_LAUNCH_DEVMODE=true`. If you are -on bare metal you can just set this via the `export QUARKUS_LAUNCH_DEVMODE=true` command, if you are running using -docker start the image with `-e QUARKUS_LAUNCH_DEVMODE=true`. When the application starts you should now see the following -line in the logs: `Profile dev activated. Live Coding activated`. +on bare metal you can set it via the `export QUARKUS_LAUNCH_DEVMODE=true` command and then run the application with the proper `java -jar ...` command to run the application. + +If you plan on running the application via Docker, then you'll need to add `-e QUARKUS_LAUNCH_DEVMODE=true` to the `docker run` command. +When the application starts you should now see the following line in the logs: `Profile dev activated. Live Coding activated`. NOTE: The remote side does not need to include Maven or any other development tools. The normal `fast-jar` Dockerfile that is generated with a new Quarkus application is all you need. If you are using bare metal launch the Quarkus runner @@ -184,7 +202,7 @@ jar, do not attempt to run normal devmode. Now you need to connect your local agent to the remote host, using the `remote-dev` command: -[source,shell] +[source,bash] ---- ./mvnw quarkus:remote-dev -Dquarkus.live-reload.url=http://my-remote-host:8080 ---- @@ -197,6 +215,10 @@ If you do not want to use the HTTP feature then you can simply run the `remote-d In this mode the command will continuously rebuild the local application, so you can use an external tool such as odo or rsync to sync to the remote application. 
+All the config options are shown below: + +include::{generated-dir}/config/quarkus-live-reload-live-reload-config.adoc[opts=optional, leveloffset=+1] + NOTE: It is recommended you use SSL when using remote dev mode, however even if you are using an unencrypted connection your password is never sent directly over the wire. For the initial connection request the password is hashed with the initial state data, and subsequent requests hash it with a random session id generated by the server and any body contents @@ -254,7 +276,7 @@ An additional system property `suspend` can be used to suspend the JVM, when lau [TIP] ==== -You can also run a Quarkus application in debug mode with a suspended JVM using `./mvnw compile quarkus:dev -Ddebug` which is a shorthand for `./mvnw compile quarkus:dev -Ddebug=true`. +You can also run a Quarkus application in debug mode with a suspended JVM using `./mvnw compile quarkus:dev -Dsuspend` which is a shorthand for `./mvnw compile quarkus:dev -Dsuspend=true`. Then, attach your debugger to `localhost:5005`. ==== @@ -318,10 +340,10 @@ To be able to use it, the following plugin configuration has to be added to the Now you should be able to execute `./mvnw quarkus-bootstrap:build-tree` on your project and see something like: -[source,shell,subs=attributes+] +[source,text,subs=attributes+] ---- [INFO] --- quarkus-bootstrap-maven-plugin:{quarkus-version}:build-tree (default-cli) @ getting-started --- -[INFO] org.acme:getting-started:jar:1.0-SNAPSHOT +[INFO] org.acme:getting-started:jar:1.0.0-SNAPSHOT [INFO] └─ io.quarkus:quarkus-resteasy-deployment:jar:{quarkus-version} (compile) [INFO] ├─ io.quarkus:quarkus-resteasy-server-common-deployment:jar:{quarkus-version} (compile) [INFO] │ ├─ io.quarkus:quarkus-core-deployment:jar:{quarkus-version} (compile) @@ -335,7 +357,7 @@ Now you should be able to execute `./mvnw quarkus-bootstrap:build-tree` on your Native executables make Quarkus applications ideal for containers and serverless workloads. -Make sure to have `GRAALVM_HOME` configured and pointing to GraalVM version {graalvm-version}. +Make sure to have `GRAALVM_HOME` configured and pointing to GraalVM version {graalvm-version} (Make sure to use a Java 11 version of GraalVM). Verify that your `pom.xml` has the proper `native` profile (see <>). Create a native executable using: `./mvnw package -Pnative`. @@ -345,7 +367,7 @@ To run Integration Tests on the native executable, make sure to have the proper [source,shell] ---- -./mvnw verify -Pnative +$ ./mvnw verify -Pnative ... [quarkus-quickstart-runner:50955] universe: 391.96 ms [quarkus-quickstart-runner:50955] (parse): 904.37 ms @@ -379,7 +401,7 @@ Executing [/Users/starksm/Dev/JBoss/Quarkus/starksm64-quarkus-quickstarts/gettin The native executable will be specific to your operating system. To create an executable that will run in a container, use the following: -[source,shell] +[source,bash] ---- ./mvnw package -Dnative -Dquarkus.native.container-build=true ---- @@ -412,7 +434,7 @@ If you have not used <>, add the following <1> io.quarkus - quarkus-bom + quarkus-universe-bom ${quarkus.platform.version} pom import @@ -426,10 +448,13 @@ If you have not used <>, add the following io.quarkus quarkus-maven-plugin ${quarkus-plugin.version} + true build + generate-code + generate-code-tests @@ -490,10 +515,24 @@ If you have not used <>, add the following <5> Enable the `native` package type. The build will therefore produce a native executable. 
<6> If you want to test your native executable with Integration Tests, add the following plugin configuration. Test names `*IT` and annotated `@NativeImageTest` will be run against the native executable. See the link:building-native-image[Native executable guide] for more info. +=== Using fast-jar + +`fast-jar` is now the default quarkus package type. The result of `./mvnw package` command is a new directory under `target` named `quarkus-app`. + +You can run the application using: `java -jar target/quarkus-app/quarkus-run.jar`. + +WARNING: In order to successfully run the produced jar, you need to have the entire contents of the `quarkus-app` directory. If any of the files are missing, the application will not start or +might not function correctly. + +TIP: The `fast-jar` packaging results in creating an artifact that starts a little faster and consumes slightly less memory than a legacy Quarkus jar +because it has indexed information about which dependency jar contains classes and resources. It can thus avoid the lookup into potentially every jar +on the classpath that the legacy jar necessitates, when loading a class or resource. + [[uber-jar-maven]] === Uber-Jar Creation -Quarkus Maven plugin supports the generation of Uber-Jars by specifying a `quarkus.package.type=uber-jar` configuration option in your `application.properties`. +Quarkus Maven plugin supports the generation of Uber-Jars by specifying a `quarkus.package.type=uber-jar` configuration option in your `application.properties` +(or `uber-jar` in your `pom.xml`). The original jar will still be present in the `target` directory but it will be renamed to contain the `.original` suffix. diff --git a/docs/src/main/asciidoc/micrometer.adoc b/docs/src/main/asciidoc/micrometer.adoc new file mode 100644 index 0000000000000..860e7c8c755a4 --- /dev/null +++ b/docs/src/main/asciidoc/micrometer.adoc @@ -0,0 +1,438 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Micrometer Metrics + +include::./attributes.adoc[] + +This guide demonstrates how your Quarkus application can utilize the Micrometer +metrics library for runtime and application metrics. + +Apart from application-specific metrics, which are described in this guide, you may also utilize built-in metrics +exposed by various Quarkus extensions. These are described in the guide for each particular extension that supports +built-in metrics. + +IMPORTANT: Micrometer is the recommended approach to metrics for Quarkus. + +== Prerequisites + +To complete this guide, you need: + +* less than 15 minutes +* an IDE +* JDK 1.8+ installed with `JAVA_HOME` configured appropriately +* Apache Maven {maven-version} + +== Architecture + +In this example, we build a very simple microservice which offers one REST endpoint and that determines +if a number is prime. + +== Solution + +We recommend that you follow the instructions in the next sections and create the application step by step. +However, you can go right to the completed example. + +Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. + +The solution is located in the `micrometer-quickstart` {quickstarts-tree-url}/micrometer-quickstart[directory]. + +== Creating the Maven Project + +Micrometer defines a core library and a set of additional libraries that support different monitoring systems. 
+Quarkus Micrometer extensions are structured similarly: `quarkus-micrometer` provides core Micrometer support and +runtime integration and other supporting Quarkus and Quarkiverse extensions bring in additional dependencies +and requirements to support specific monitoring systems. + +For this example, we'll use the Prometheus registry. + +First, we need a new project. Create a new project with the following command: + +[source,bash,subs=attributes+] +---- +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ + -DprojectGroupId=org.acme \ + -DprojectArtifactId=micrometer-quickstart \ + -DclassName="org.acme.micrometer.PrimeNumberResource" \ + -Dpath="/" \ + -Dextensions="resteasy,micrometer-registry-prometheus" +cd micrometer-quickstart +---- + +This command generates a Maven project that imports the `micrometer-registry-prometheus` extension as a dependency. +This extension will load the core `micrometer` extension as well as additional library dependencies required to support +Prometheus. + +If you already have your Quarkus project configured, you can add the `micrometer-registry-prometheus` extension +to your project by running the following command in your project base directory: + +[source,bash] +---- +./mvnw quarkus:add-extension -Dextensions="micrometer-registry-prometheus" +---- + +This will add the following to your `pom.xml`: + +[source,xml] +---- +<dependency> + <groupId>io.quarkus</groupId> + <artifactId>quarkus-micrometer-registry-prometheus</artifactId> +</dependency> +---- + +== Writing the application + +The application consists of a single class that implements an algorithm for checking whether a number is prime. +This algorithm is exposed over a REST interface. With the Micrometer extension enabled, metrics for all http +server requests are collected automatically. + +We do want to add a few other metrics to demonstrate how those types work: + +* A counter will be incremented for every prime number discovered +* A gauge will store the highest prime number discovered +* A timer will record the time spent testing if a number is prime.
+ +[source,java] +---- +package org.acme.micrometer; + +import io.micrometer.core.instrument.MeterRegistry; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import java.util.concurrent.atomic.LongAccumulator; +import java.util.function.Supplier; + +@Path("/") +public class PrimeNumberResource { + + private final LongAccumulator highestPrime = new LongAccumulator(Long::max, 0); + private final MeterRegistry registry; + + PrimeNumberResource(MeterRegistry registry) { + this.registry = registry; + + // Create a gauge that uses the highestPrimeNumberSoFar method + // to obtain the highest observed prime number + registry.gauge("prime.number.max", this, + PrimeNumberResource::highestObservedPrimeNumber); + } + + @GET + @Path("/{number}") + @Produces(MediaType.TEXT_PLAIN) + public String checkIfPrime(@PathParam("number") long number) { + if (number < 1) { + return "Only natural numbers can be prime numbers."; + } + if (number == 1) { + return "1 is not prime."; + } + if (number == 2) { + return "2 is prime."; + } + if (number % 2 == 0) { + return number + " is not prime, it is divisible by 2."; + } + + Supplier<String> supplier = () -> { + for (int i = 3; i < Math.floor(Math.sqrt(number)) + 1; i = i + 2) { + if (number % i == 0) { + return number + " is not prime, is divisible by " + i + "."; + } + } + highestPrime.accumulate(number); + return number + " is prime."; + }; + + return registry.timer("prime.number.test").wrap(supplier).get(); + } + + /** + * This method is called by the registered {@code prime.number.max} gauge. + * @return the highest observed prime value + */ + long highestObservedPrimeNumber() { + return highestPrime.get(); + } +} +---- + +== Running and using the application + +To run the microservice in dev mode, use `./mvnw clean compile quarkus:dev` + +=== Generate some values for the metrics + +First, ask the endpoint whether some numbers are prime numbers. + +[source,bash] +---- +curl localhost:8080/350 +---- + +The application will respond that 350 is not a prime number because it can be divided by 2. + +Now for some large prime number so that the test takes a bit more time: + +[source,bash] +---- +curl localhost:8080/629521085409773 +---- + +The application will respond that 629521085409773 is a prime number. +If you want, try some more calls with numbers of your choice. + +=== Review the generated metrics + +To view the metrics, execute `curl localhost:8080/q/metrics/` + +Prometheus-formatted metrics will be returned in plain text in no particular order. + +The application above registers a custom gauge that tracks the highest observed prime number and a custom timer that measures the time +spent determining whether or not a number is prime. The Micrometer extension +enables additional system, jvm, and http metrics. A subset of the collected metrics +are shown below.
+ +[source,text] +---- +# HELP http_server_requests_seconds +# TYPE http_server_requests_seconds summary +http_server_requests_seconds_count{method="GET",outcome="SUCCESS",status="200",uri="/{number}",} 4.0 +http_server_requests_seconds_sum{method="GET",outcome="SUCCESS",status="200",uri="/{number}",} 0.041501773 +# HELP http_server_requests_seconds_max +# TYPE http_server_requests_seconds_max gauge +http_server_requests_seconds_max{method="GET",outcome="SUCCESS",status="200",uri="/{number}",} 0.038066359 +# HELP jvm_threads_peak_threads The peak live thread count since the Java virtual machine started or peak was reset +# TYPE jvm_threads_peak_threads gauge +jvm_threads_peak_threads 56.0 +# HELP http_server_connections_seconds_max +# TYPE http_server_connections_seconds_max gauge +http_server_connections_seconds_max 0.102580737 +# HELP http_server_connections_seconds +# TYPE http_server_connections_seconds summary +http_server_connections_seconds_active_count 5.0 +http_server_connections_seconds_duration_sum 0.175032815 +# HELP system_load_average_1m The sum of the number of runnable entities queued to available processors and the number of runnable entities running on the available processors averaged over a period of time +# TYPE system_load_average_1m gauge +system_load_average_1m 2.4638671875 +# HELP http_server_bytes_written_max +# TYPE http_server_bytes_written_max gauge +http_server_bytes_written_max 39.0 +# HELP http_server_bytes_written +# TYPE http_server_bytes_written summary +http_server_bytes_written_count 4.0 +http_server_bytes_written_sum 99.0 +# HELP jvm_classes_loaded_classes The number of classes that are currently loaded in the Java virtual machine +# TYPE jvm_classes_loaded_classes gauge +jvm_classes_loaded_classes 9341.0 +# HELP prime_number_max +# TYPE prime_number_max gauge +prime_number_max 887.0 +# HELP jvm_info JVM version info +# TYPE jvm_info gauge +jvm_info{runtime="OpenJDK Runtime Environment",vendor="Oracle Corporation",version="13.0.2+8",} 1.0 +# HELP jvm_classes_unloaded_classes_total The total number of classes unloaded since the Java virtual machine has started execution +# TYPE jvm_classes_unloaded_classes_total counter +jvm_classes_unloaded_classes_total 28.0 +# HELP prime_number_test_seconds +# TYPE prime_number_test_seconds summary +prime_number_test_seconds_count 3.0 +prime_number_test_seconds_sum 1.94771E-4 +# HELP prime_number_test_seconds_max +# TYPE prime_number_test_seconds_max gauge +prime_number_test_seconds_max 1.76162E-4 +# HELP http_server_bytes_read +# TYPE http_server_bytes_read summary +http_server_bytes_read_count 4.0 +http_server_bytes_read_sum 0.0 +# HELP http_server_bytes_read_max +# TYPE http_server_bytes_read_max gauge +http_server_bytes_read_max 0.0 +# HELP jvm_threads_states_threads The current number of threads having NEW state +# TYPE jvm_threads_states_threads gauge +jvm_threads_states_threads{state="runnable",} 37.0 +jvm_threads_states_threads{state="blocked",} 0.0 +jvm_threads_states_threads{state="waiting",} 15.0 +jvm_threads_states_threads{state="timed-waiting",} 4.0 +jvm_threads_states_threads{state="new",} 0.0 +jvm_threads_states_threads{state="terminated",} 0.0 +# HELP jvm_buffer_memory_used_bytes An estimate of the memory that the Java virtual machine is using for this buffer pool +# TYPE jvm_buffer_memory_used_bytes gauge +jvm_buffer_memory_used_bytes{id="mapped",} 0.0 +jvm_buffer_memory_used_bytes{id="direct",} 149521.0 +# HELP jvm_memory_committed_bytes The amount of memory in bytes that is committed for the Java 
virtual machine to use +# TYPE jvm_memory_committed_bytes gauge +jvm_memory_committed_bytes{area="nonheap",id="CodeHeap 'profiled nmethods'",} 1.1403264E7 +jvm_memory_committed_bytes{area="heap",id="G1 Survivor Space",} 4194304.0 +jvm_memory_committed_bytes{area="heap",id="G1 Old Gen",} 9.2274688E7 +jvm_memory_committed_bytes{area="nonheap",id="Metaspace",} 4.9803264E7 +jvm_memory_committed_bytes{area="nonheap",id="CodeHeap 'non-nmethods'",} 2555904.0 +jvm_memory_committed_bytes{area="heap",id="G1 Eden Space",} 6.9206016E7 +jvm_memory_committed_bytes{area="nonheap",id="Compressed Class Space",} 6815744.0 +jvm_memory_committed_bytes{area="nonheap",id="CodeHeap 'non-profiled nmethods'",} 2555904.0 +---- + +Note that metrics appear lazily, you often won't see any data for your endpoint until +something tries to access it, etc. + +== Using MeterFilter to configure metrics + +Micrometer uses `MeterFilter` instances to customize the metrics emitted by `MeterRegistry` instances. +The Micrometer extension will detect `MeterFilter` CDI beans and use them when initializing `MeterRegistry` +instances. + +[source,java] +---- +@Singleton +public class CustomConfiguration { + + @ConfigProperty(name = "deployment.env") + String deploymentEnv; + + /** Define common tags that apply only to a Prometheus Registry */ + @Produces + @Singleton + @MeterFilterConstraint(applyTo = PrometheusMeterRegistry.class) + public MeterFilter configurePrometheusRegistries() { + return MeterFilter.commonTags(Arrays.asList( + Tag.of("registry", "prometheus"))); + } + + /** Define common tags that apply globally */ + @Produces + @Singleton + public MeterFilter configureAllRegistries() { + return MeterFilter.commonTags(Arrays.asList( + Tag.of("env", deploymentEnv))); + } + + /** Enable histogram buckets for a specific timer */ + @Produces + @Singleton + public MeterFilter enableHistogram() { + return new MeterFilter() { + @Override + public DistributionStatisticConfig configure(Meter.Id id, DistributionStatisticConfig config) { + if(id.getName().startsWith("myservice")) { + return DistributionStatisticConfig.builder() + .percentiles(0.5, 0.95) // median and 95th percentile, not aggregable + .percentilesHistogram(true) // histogram buckets (e.g. prometheus histogram_quantile) + .build() + .merge(config); + } + return config; + } + }; + } +} +---- + +In this example, a singleton CDI bean will produce two different `MeterFilter` beans. One will be applied only to +Prometheus `MeterRegistry` instances (using the `@MeterFilterConstraint` qualifier), and another will be applied +to all `MeterRegistry` instances. An application configuration property is also injected and used as a tag value. + +== Using other Registry implementations + +If you aren't using Prometheus, you have a few options. Some Micrometer registry implementations +have been wrapped in +https://github.com/quarkiverse/quarkiverse-micrometer-registry[Quarkiverse extensions]. 
+To use the Micrometer StackDriver MeterRegistry, for example, you would use the +`quarkus-micrometer-registry-stackdriver` extension: + +[source,xml] +---- +<dependency> +    <groupId>io.quarkiverse</groupId> +    <artifactId>quarkus-micrometer-registry-stackdriver</artifactId> +</dependency> +---- + +If the Micrometer registry you would like to use does not yet have an associated extension, +use the `quarkus-micrometer` extension and bring in the packaged MeterRegistry dependency directly: + +[source,xml] +---- +<dependency> +    <groupId>io.quarkus</groupId> +    <artifactId>quarkus-micrometer</artifactId> +</dependency> +<dependency> +    <groupId>com.acme</groupId> +    <artifactId>custom-micrometer-registry</artifactId> +</dependency> +---- + +You will then need to specify your own provider to configure and initialize the +MeterRegistry, as discussed in the next section. + +== Creating a customized MeterRegistry + +Use a custom `@Produces` method to create and configure a customized `MeterRegistry` if you need to. + +The following example customizes the line format used for StatsD: + +[source,java] +---- +@Produces +@Singleton +public StatsdMeterRegistry createStatsdMeterRegistry(StatsdConfig statsdConfig, Clock clock) { +    // define what to do with lines +    Consumer<String> lineLogger = line -> logger.info(line); + +    // inject a configuration object, and then customize the line builder +    return StatsdMeterRegistry.builder(statsdConfig) +            .clock(clock) +            .lineSink(lineLogger) +            .build(); +} +---- + +This example corresponds to the following instructions in the Micrometer documentation: +https://micrometer.io/docs/registry/statsD#_customizing_the_metrics_sink + +Note that the method returns the specific type of `MeterRegistry` as a `@Singleton`. Use MicroProfile Config +to inject any configuration attributes you need to configure the registry. Most Micrometer registry extensions, +like `quarkus-micrometer-registry-statsd`, define a producer for registry-specific configuration objects +that are integrated with the Quarkus configuration model. + +== Support for the MicroProfile Metrics API + +If you use the MicroProfile Metrics API in your application, the Micrometer extension will create an adaptive +layer to map those metrics into the Micrometer registry. Note that naming conventions between the two +systems will change, but you can use MeterFilters to remap names or tags to what your dashboards require. + +[source,java] +---- +@Produces +@Singleton +public MeterFilter renameApplicationMeters() { +    final String targetMetric = PrimeResource.class.getName() + ".highestPrimeNumberSoFar"; + +    return new MeterFilter() { +        @Override +        public Meter.Id map(Meter.Id id) { +            // rename the specified metric (remove the package prefix), and drop the scope tag +            // you could also use this to prepend a scope tag (application, base, vendor, if present) to the metric name +            if (id.getName().equals(targetMetric)) { +                List<Tag> tags = id.getTags().stream().filter(x -> !"scope".equals(x.getKey())) +                        .collect(Collectors.toList()); +                return id.withName("highestPrimeNumberSoFar").replaceTags(tags); +            } +            return id; +        } +    }; +} +---- + +NOTE: The MP Metrics API compatibility layer will be moved to a different extension in the future.
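+For illustration only, here is a minimal sketch of an MP Metrics metric that this adaptive layer would pick up, assuming a hypothetical `PrimeResource` with a `highestPrime` counter (neither is part of this guide); the `MeterFilter` shown above then renames the resulting meter:
+
+[source,java]
+----
+import java.util.concurrent.atomic.AtomicLong;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+import org.eclipse.microprofile.metrics.MetricUnits;
+import org.eclipse.microprofile.metrics.annotation.Gauge;
+
+@Path("/prime")
+public class PrimeResource {
+
+    // hypothetical application state: the highest prime checked so far
+    private final AtomicLong highestPrime = new AtomicLong(2);
+
+    @GET
+    public long highestPrime() {
+        return highestPrime.get();
+    }
+
+    // The adaptive layer registers this MP Metrics gauge with the Micrometer registry
+    // under a class-qualified name; the MeterFilter above then renames it.
+    @Gauge(name = "highestPrimeNumberSoFar", unit = MetricUnits.NONE,
+            description = "Highest prime number checked so far.")
+    public long highestPrimeNumberSoFar() {
+        return highestPrime.get();
+    }
+}
+----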
+ +== Configuration Reference + +include::{generated-dir}/config/quarkus-micrometer.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/microprofile-fault-tolerance.adoc b/docs/src/main/asciidoc/microprofile-fault-tolerance.adoc index c0600c8dd4b2e..6cb2699413ab6 100644 --- a/docs/src/main/asciidoc/microprofile-fault-tolerance.adoc +++ b/docs/src/main/asciidoc/microprofile-fault-tolerance.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Fault Tolerance @@ -44,14 +44,14 @@ The solution is located in the `microprofile-fault-tolerance-quickstart` {quicks First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=microprofile-fault-tolerance-quickstart \ -DclassName="org.acme.microprofile.faulttolerance.CoffeeResource" \ -Dpath="/coffee" \ - -Dextensions="smallrye-fault-tolerance, resteasy-jsonb" + -Dextensions="resteasy,smallrye-fault-tolerance,resteasy-jackson" cd microprofile-fault-tolerance-quickstart ---- @@ -163,19 +163,16 @@ import java.util.concurrent.atomic.AtomicLong; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; import org.jboss.logging.Logger; @Path("/coffee") -@Produces(MediaType.APPLICATION_JSON) public class CoffeeResource { private static final Logger LOGGER = Logger.getLogger(CoffeeResource.class); @Inject - private CoffeeRepositoryService coffeeRepository; + CoffeeRepositoryService coffeeRepository; private AtomicLong counter = new AtomicLong(0); @@ -204,7 +201,7 @@ in the `CoffeeResource#coffees()` endpoint method in about 50 % of requests. Why not check that our application works? Run the Quarkus development server with: -[source,shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -447,6 +444,39 @@ To test this out, do the following: 5. Give it 5 seconds during which circuit breaker should close and you should be able to make two successful requests again. +== Runtime configuration + +You can override the annotation parameters at runtime inside your `application.properties` file. + +If we take the retry example that we already saw: + +[source,java] +---- +package org.acme; + +import org.eclipse.microprofile.faulttolerance.Retry; +... + +public class CoffeeResource { + ... + @GET + @Retry(maxRetries = 4) + public List<Coffee> coffees() { + ... + } + ... +} +---- + +We can override the `maxRetries` parameter with 6 retries instead of 4 with the following configuration item: +[source,properties] +---- +org.acme.CoffeeResource/coffees/Retry/maxRetries=6 +---- + +NOTE: The format is `fully-qualified-class-name/method-name/annotation-name/property-name=value`. +You can also configure a property for all instances of an annotation via `annotation-name/property-name=value`.
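+For example, the following (illustrative) entry would change the default for every `@Retry` annotation in the application, using the global form:
+
+[source,properties]
+----
+Retry/maxRetries=3
+----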
+ == Conclusion MicroProfile Fault Tolerance allows to improve resiliency of your application, without having an impact on the complexity @@ -456,7 +486,7 @@ All that is needed to enable the fault tolerance features in Quarkus is: * adding the `smallrye-fault-tolerance` Quarkus extension to your project using the `quarkus-maven-plugin`: + -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="smallrye-fault-tolerance" ---- diff --git a/docs/src/main/asciidoc/microprofile-graphql.adoc b/docs/src/main/asciidoc/microprofile-graphql.adoc index eb474468bf858..789adcf782269 100644 --- a/docs/src/main/asciidoc/microprofile-graphql.adoc +++ b/docs/src/main/asciidoc/microprofile-graphql.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - GraphQL @@ -65,13 +65,13 @@ The solution is located in the `microprofile-graphql-quickstart` {quickstarts-tr First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=microprofile-graphql-quickstart \ -DclassName="org.acme.microprofile.graphql.FilmResource" \ - -Dextensions="graphql" + -Dextensions="resteasy,graphql" cd microprofile-graphql-quickstart ---- @@ -360,7 +360,7 @@ Launch the quarkus app: The full schema of the GraphQL API can be retrieved by calling the following: -[source] +[source,bash] ---- curl http://localhost:8080/graphql/schema.graphql ---- @@ -380,6 +380,8 @@ GraphiQL can be accessed from http://localhost:8080/graphql-ui/ . image:graphql-ui-screenshot01.png[alt=GraphQL UI] +Have a look at the link:security-authorization[Authorization of Web Endpoints] Guide on how to add/remove security for the GraphQL UI. + == Query the GraphQL API Now visit the GraphiQL page that has been deployed in `dev` mode. @@ -529,6 +531,68 @@ query getFilmHeroes { The response now includes the heroes of the film. +=== Batching + +When you are exposing a `Collection` return type like our `getAllFilms`, you might want to use the batch form of the above to fetch +the heroes more efficiently: + +[source,java] +---- +    public List<List<Hero>> heroes(@Source List<Film> films) { // <1> +        // Here fetch all hero lists +    } +---- + +<1> Here we receive the films as a batch, allowing you to fetch the corresponding heroes. + +=== Reactive + +Queries can be made reactive by using `Uni` or `CompletionStage` as a return type, for example: + +[source,java] +---- +    @Query +    @Description("Get a Film from a galaxy far far away") +    public Uni<Film> getFilm(@Name("filmId") int id) { +        // ... +    } +---- + +NOTE: Due to the underlying library, graphql-java, `Uni` creates a `CompletionStage` under the hood. + +Or you can use `CompletionStage`: + +[source,java] +---- +    @Query +    @Description("Get a Film from a galaxy far far away") +    public CompletionStage<Film> getFilm(@Name("filmId") int id) { +        // ... +    } +---- + +Using `Uni` or `CompletionStage` means that when a request contains more than one query, they will be executed concurrently.
+ +For instance, the query below will fetch `film0` and `film1` concurrently: + +[source, graphql] +---- +query getFilms { + film0: film(filmId: 0) { + title + director + releaseDate + episodeID + } + film1: film(filmId: 1) { + title + director + releaseDate + episodeID + } +} +---- + == Mutations Mutations are used when data is created, updated or deleted. @@ -631,6 +695,159 @@ query heroWithSurnames { } ---- +== Context + +You can get information about the GraphQL request anywhere in your code, using this experimental, SmallRye specific feature: + +[source,java] +---- +@Inject +Context context; +---- + +The context object allows you to get: + +- the original request (Query/Mutation) +- the arguments +- the path +- the selected fields +- any variables + +This allows you to optimize the downstream queries to the datastore. + +See the https://javadoc.io/doc/io.smallrye/smallrye-graphql-api/latest/io/smallrye/graphql/api/Context.html[JavaDoc] for more details. + +=== GraphQL-Java + +This context object also allows you to fall down to the underlying https://www.graphql-java.com/[graphql-java] features by using the leaky abstraction: + +[source,java] +---- +DataFetchingEnvironment dfe = context.unwrap(DataFetchingEnvironment.class); +---- + +You can also get access to the underlying `graphql-java` during schema generation, to add your own features directly: + +[source,java] +---- +public GraphQLSchema.Builder addMyOwnEnum(@Observes GraphQLSchema.Builder builder) { + + // Here add your own features directly, example adding an Enum + GraphQLEnumType myOwnEnum = GraphQLEnumType.newEnum() + .name("SomeEnum") + .description("Adding some enum type") + .value("value1") + .value("value2").build(); + + return builder.additionalType(myOwnEnum); +} +---- + +By using the `@Observer` you can add anything to the Schema builder. + +== Map to Scalar + +Another SmallRye specific experimental feature, allows you to map an existing scalar (that is mapped by the implementation to a certain Java type) to another type, +or to map complex object, that would typically create a `Type` or `Input` in GraphQL, to an existing scalar. + +=== Mapping an existing Scalar to another type: + +[source,java] +---- +public class Movie { + + @ToScalar(Scalar.Int.class) + Long idLongThatShouldChangeToInt; + + // .... +} +---- + +Above will map the `Long` java type to an `Int` Scalar type, rather than the https://download.eclipse.org/microprofile/microprofile-graphql-1.0/microprofile-graphql.html#scalars[default] `BigInteger`. + +=== Mapping a complex object to a Scalar type: + +[source,java] +---- +public class Person { + + @ToScalar(Scalar.String.class) + Phone phone; + + // .... +} +---- + +This will, rather than creating a `Type` or `Input` in GraphQL, map to a String scalar. + +To be able to do the above, the `Phone` object needs to have a constructor that takes a String (or `Int` / `Date` / etc.), +or have a setter method for the String (or `Int` / `Date` / etc.), +or have a `fromString` (or `fromInt` / `fromDate` - depending on the Scalar type) static method. + +For example: + +[source,java] +---- +public class Phone { + + private String number; + + // Getters and setters.... + + public static Phone fromString(String number) { + Phone phone = new Phone(); + phone.setNumber(number); + return phone; + } +} +---- + +See more about the `@ToScalar` feature in the https://javadoc.io/static/io.smallrye/smallrye-graphql-api/1.0.6/index.html?io/smallrye/graphql/api/ToScalar.html[JavaDoc]. 
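+For illustration, with the `@ToScalar` mapping above, the `phone` field would then appear in the generated schema as a plain `String` (a rough sketch, not actual generated output):
+
+[source,graphql]
+----
+type Person {
+  phone: String
+}
+----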
+ +== Error code + +You can add an error code on the error output in the GraphQL response by using the (SmallRye specific) `@ErrorCode`: + +[source,java] +---- +@ErrorCode("some-business-error-code") +public class SomeBusinessException extends RuntimeException { + // ... +} +---- + +When `SomeBusinessException` occurs, the error output will contain the Error code: + +[source,graphql] +---- +{ + "errors": [ + { + "message": "Unexpected failure in the system. Jarvis is working to fix it.", + "locations": [ + { + "line": 2, + "column": 3 + } + ], + "path": [ + "annotatedCustomBusinessException" + ], + "extensions": { + "exception": "io.smallrye.graphql.test.apps.error.api.ErrorApi$AnnotatedCustomBusinessException", + "classification": "DataFetchingException", + "code": "some-business-error-code" <1> + } + } + ], + "data": { + ... + } +} +---- + +<1> The error code + == Conclusion MicroProfile GraphQL enables clients to retrieve the exact data that is diff --git a/docs/src/main/asciidoc/microprofile-health.adoc b/docs/src/main/asciidoc/microprofile-health.adoc index ac181ae400136..52e2075173faf 100644 --- a/docs/src/main/asciidoc/microprofile-health.adoc +++ b/docs/src/main/asciidoc/microprofile-health.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - MicroProfile Health @@ -27,7 +27,7 @@ To complete this guide, you need: == Architecture In this guide, we build a simple REST application that exposes MicroProfile Health -functionalities at the `/health/live` and `/health/ready` endpoints according to the +functionalities at the `/q/health/live` and `/q/health/ready` endpoints according to the specification. == Solution @@ -45,12 +45,12 @@ The solution is located in the `microprofile-health-quickstart` First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=microprofile-health-quickstart \ - -Dextensions="health" + -Dextensions="smallrye-health" cd microprofile-health-quickstart ---- @@ -79,15 +79,15 @@ This will add the following to your `pom.xml`: Importing the `smallrye-health` extension directly exposes three REST endpoints: -- `/health/live` - The application is up and running. -- `/health/ready` - The application is ready to serve requests. -- `/health` - Accumulating all health check procedures in the application. +- `/q/health/live` - The application is up and running. +- `/q/health/ready` - The application is ready to serve requests. +- `/q/health` - Accumulating all health check procedures in the application. 
To check that the `smallrye-health` extension is working as expected: * start your Quarkus application with `./mvnw compile quarkus:dev` -* access the `http://localhost:8080/health/live` endpoint using your browser or -`curl http://localhost:8080/health/live` +* access the `http://localhost:8080/q/health/live` endpoint using your browser or +`curl http://localhost:8080/q/health/live` All of the health REST endpoints return a simple JSON object with two fields: @@ -129,16 +129,16 @@ public class SimpleHealthCheck implements HealthCheck { As you can see, the health check procedures are defined as CDI beans that implement the `HealthCheck` interface and are annotated with one of the health check qualifiers, such as: -- `@Liveness` - the liveness check accessible at `/health/live` -- `@Readiness` - the readiness check accessible at `/health/ready` +- `@Liveness` - the liveness check accessible at `/q/health/live` +- `@Readiness` - the readiness check accessible at `/q/health/ready` `HealthCheck` is a functional interface whose single method `call` returns a `HealthCheckResponse` object which can be easily constructed by the fluent builder API shown in the example. As we have started our Quarkus application in dev mode simply repeat the request -to `http://localhost:8080/health/live` by refreshing your browser window or by -using `curl http://localhost:8080/health/live`. Because we defined our health check +to `http://localhost:8080/q/health/live` by refreshing your browser window or by +using `curl http://localhost:8080/q/health/live`. Because we defined our health check to be a liveness procedure (with `@Liveness` qualifier) the new health check procedure is now present in the `checks` array. @@ -180,15 +180,15 @@ public class DatabaseConnectionHealthCheck implements HealthCheck { ---- -If you now rerun the health check at `http://localhost:8080/health/live` the `checks` +If you now rerun the health check at `http://localhost:8080/q/health/live` the `checks` array will contain only the previously defined `SimpleHealthCheck` as it is the only check defined with the `@Liveness` qualifier. However, if you access -`http://localhost:8080/health/ready` (in the browser or with -`curl http://localhost:8080/health/ready`) you will see only the +`http://localhost:8080/q/health/ready` (in the browser or with +`curl http://localhost:8080/q/health/ready`) you will see only the `Database connection health check` as it is the only health check defined with the `@Readiness` qualifier as the readiness health check procedure. -NOTE: If you access `http://localhost:8080/health` you will get back both checks. +NOTE: If you access `http://localhost:8080/q/health` you will get back both checks. More information about which health check procedures should be used in which situation is detailed in the MicroProfile Health specification. Generally, the liveness @@ -253,9 +253,9 @@ through the `HealthCheckResponse#up(String)` (there is also From now on, we utilize the full builder capabilities provided by the `HealthCheckResponseBuilder` class. -If you now rerun the readiness health check (at `http://localhost:8080/health/ready`) +If you now rerun the readiness health check (at `http://localhost:8080/q/health/ready`) the overall `status` should be DOWN. You can also check the liveness check at -`http://localhost:8080/health/live` which will return the overall `status` UP because +`http://localhost:8080/q/health/live` which will return the overall `status` UP because it isn't influenced by the readiness checks. 
As we shouldn't leave this application with a readiness check in a DOWN state and @@ -300,7 +300,7 @@ public class DataHealthCheck implements HealthCheck { } ---- -If you rerun the liveness health check procedure by accessing the `/health/live` +If you rerun the liveness health check procedure by accessing the `/q/health/live` endpoint you can see that the new health check `Health check with data` is present in the `checks` array. This check contains a new attribute called `data` which is a JSON object consisting of the properties we have defined in our health check procedure. @@ -338,7 +338,7 @@ NOTE: Experimental - not included in the MicroProfile specification The Quarkus `smallrye-health` extension ships with `health-ui` and enables it by default in dev and test modes, but it can also be explicitly configured for production mode as well. -`health-ui` can be accessed from http://localhost:8080/health-ui/ . +`health-ui` can be accessed from http://localhost:8080/q/health-ui/ . image:health-ui-screenshot01.png[alt=Health UI] @@ -355,7 +355,7 @@ All that is needed to enable the MicroProfile Health features in Quarkus is: `quarkus-maven-plugin`: + [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="health" +./mvnw quarkus:add-extension -Dextensions="smallrye-health" ---- * or simply adding the following Maven dependency: + diff --git a/docs/src/main/asciidoc/microprofile-metrics.adoc b/docs/src/main/asciidoc/microprofile-metrics.adoc index 57fc2e367a06d..bc1438a5bff3f 100644 --- a/docs/src/main/asciidoc/microprofile-metrics.adoc +++ b/docs/src/main/asciidoc/microprofile-metrics.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - MicroProfile Metrics @@ -21,6 +21,9 @@ Apart from application-specific metrics, which are described in this guide, you exposed by various Quarkus extensions. These are described in the guide for each particular extension that supports built-in metrics. +IMPORTANT: link:micrometer[Micrometer] is the recommended approach to metrics for Quarkus. +Use the MicroProfile Metrics extension when it's required to retain MicroProfile specification compatibility. + == Prerequisites To complete this guide, you need: @@ -48,12 +51,14 @@ The solution is located in the `microprofile-metrics-quickstart` {quickstarts-tr First, we need a new project. 
Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=microprofile-metrics-quickstart \ - -Dextensions="smallrye-metrics" + -DclassName="org.acme.microprofile.metrics.PrimeNumberResource" \ + -Dpath="/" \ + -Dextensions="resteasy,smallrye-metrics" cd microprofile-metrics-quickstart ---- @@ -104,6 +109,7 @@ import org.jboss.resteasy.annotations.jaxrs.PathParam; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; @Path("/") public class PrimeNumberChecker { @@ -112,7 +118,7 @@ public class PrimeNumberChecker { @GET @Path("/{number}") - @Produces("text/plain") + @Produces(MediaType.TEXT_PLAIN) @Counted(name = "performedChecks", description = "How many primality checks have been performed.") @Timed(name = "checksTimer", description = "A measure of how long it takes to perform the primality test.", unit = MetricUnits.MILLISECONDS) public String checkIfPrime(@PathParam long number) { @@ -154,7 +160,7 @@ To run the microservice in dev mode, use `./mvnw clean compile quarkus:dev` === Generate some values for the metrics First, ask the endpoint whether some numbers are prime numbers. -[source,shell] +[source,bash] ---- curl localhost:8080/350 ---- @@ -163,7 +169,7 @@ The application will respond that 350 is not a prime number because it can be di Now for some large prime number so that the test takes a bit more time: -[source,shell] +[source,bash] ---- curl localhost:8080/629521085409773 ---- @@ -172,7 +178,7 @@ The application will respond that 629521085409773 is a prime number. If you want, try some more calls with numbers of your choice. === Review the generated metrics -To view the metrics, execute `curl -H"Accept: application/json" localhost:8080/metrics/application` +To view the metrics, execute `curl -H"Accept: application/json" localhost:8080/q/metrics/application` You will receive a response such as: [source,json] diff --git a/docs/src/main/asciidoc/mongodb-panache.adoc b/docs/src/main/asciidoc/mongodb-panache.adoc index 35096915171b7..bb1d3e2d90ca5 100644 --- a/docs/src/main/asciidoc/mongodb-panache.adoc +++ b/docs/src/main/asciidoc/mongodb-panache.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Simplified MongoDB with Panache @@ -63,18 +63,18 @@ The solution is located in the `mongodb-panache-quickstart` {quickstarts-tree-ur First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=mongodb-panache-quickstart \ -DclassName="org.acme.mongodb.panache.PersonResource" \ -Dpath="/persons" \ - -Dextensions="resteasy-jsonb,mongodb-panache" + -Dextensions="resteasy,resteasy-jackson,mongodb-panache" cd mongodb-panache-quickstart ---- -This command generates a Maven structure importing the RESTEasy/JAX-RS, JSON-B and MongoDB with Panache extensions. +This command generates a Maven structure importing the RESTEasy/JAX-RS, Jackson and MongoDB with Panache extensions. 
After this, the `quarkus-mongodb-panache` extension has been added to your `pom.xml`. If you don't want to generate a new project, add the dependency in your `pom.xml`: @@ -151,6 +151,8 @@ If you need to write accessors, you can: [source,java] ---- public class Person extends PanacheMongoEntity { + + @JsonProperty public String name; public LocalDate birth; public Status status; @@ -182,7 +184,7 @@ person.name = "Loïc"; person.birth = LocalDate.of(1910, Month.FEBRUARY, 1); person.status = Status.Alive; -// persist it +// persist it: if you keep the default ObjectId ID field, it will be populated by the MongoDB driver person.persist(); person.status = Status.Dead; @@ -197,6 +199,8 @@ person.delete(); List allPersons = Person.listAll(); // finding a specific person by ID +// here we build a new ObjectId but you can also retrieve it from the existing entity after being persisted +ObjectId personId = new ObjectId(idAsString); person = Person.findById(personId); // finding a specific person by ID via an Optional @@ -348,7 +352,7 @@ person.name = "Loïc"; person.birth = LocalDate.of(1910, Month.FEBRUARY, 1); person.status = Status.Alive; -// persist it +// persist it: if you keep the default ObjectId ID field, it will be populated by the MongoDB driver personRepository.persist(person); person.status = Status.Dead; @@ -363,6 +367,8 @@ personRepository.delete(person); List allPersons = personRepository.listAll(); // finding a specific person by ID +// here we build a new ObjectId but you can also retrieve it from the existing entity after being persisted +ObjectId personId = new ObjectId(idAsString); person = personRepository.findById(personId); // finding a specific person by ID via an Optional @@ -507,6 +513,10 @@ Here are some query examples: - `lastname is not null` will be mapped to `{'lastname':{'$exists': true}}` - `status in ?1` will be mapped to `{'status':{$in: [?1]}}` +WARNING: MongoDB queries must be valid JSON documents, +using the same field multiple times in a query is not allowed using PanacheQL as it would generate an invalid JSON +(see link:https://github.com/quarkusio/quarkus/issues/12086[this issue on github]). + We also handle some basic date type transformations: all fields of type `Date`, `LocalDate`, `LocalDateTime` or `Instant` will be mapped to the link:https://docs.mongodb.com/manual/reference/bson-types/#document-bson-type-date[BSON Date] using the `ISODate` type (UTC datetime). The MongoDB POJO codec doesn't support `ZonedDateTime` and `OffsetDateTime` so you should convert them prior usage. @@ -652,7 +662,7 @@ WARNING: MongoDB offers ACID transactions since version 4.0. MongoDB with Panach IDs are often a touchy subject. In MongoDB, they are usually auto-generated by the database with an `ObjectId` type. In MongoDB with Panache the ID are defined by a field named `id` of the `org.bson.types.ObjectId` type, -but if you want ot customize them, once again we have you covered. +but if you want to customize them, once again we have you covered. You can specify your own ID strategy by extending `PanacheMongoEntityBase` instead of `PanacheMongoEntity`. Then you just declare whatever ID you want as a public field by annotating it by `@BsonId`: @@ -687,7 +697,21 @@ you need to provide the value by yourself. ==== `ObjectId` can be difficult to use if you want to expose its value in your REST service. 
-So we created JSON-B and Jackson providers to serialize/deserialize them as a `String` which are automatically registered if your project depends on either the RESTEasy JSON-B extension or the RESTEasy Jackson extension. +So we created Jackson and JSON-B providers to serialize/deserialize them as a `String` which are automatically registered if your project depends on either the RESTEasy Jackson extension or the RESTEasy JSON-B extension. + +[IMPORTANT] +==== +If you use the standard `ObjectId` ID type, don't forget to retrieve your entity by creating a new `ObjectId` when the identifier comes from a path parameter. For example: + +[source,java] +---- +@GET +@Path("/{id}") +public Person findById(@PathParam("id") String id) { + return Person.findById(new ObjectId(id)); +} +---- +==== == Working with Kotlin Data classes @@ -795,21 +819,24 @@ person.name = "Loïc"; person.birth = LocalDate.of(1910, Month.FEBRUARY, 1); person.status = Status.Alive; -// persist it -Uni cs1 = person.persist(); +// persist it: if you keep the default ObjectId ID field, it will be populated by the MongoDB driver, +// and accessible when uni1 will be resolved +Uni uni1 = person.persist(); person.status = Status.Dead; // Your must call update() in order to send your entity modifications to MongoDB -Uni cs2 = person.update(); +Uni uni2 = person.update(); // delete it -Uni cs3 = person.delete(); +Uni uni3 = person.delete(); // getting a list of all persons Uni> allPersons = ReactivePerson.listAll(); // finding a specific person by ID +// here we build a new ObjectId but you can also retrieve it from the existing entity after being persisted +ObjectId personId = new ObjectId(idAsString); Uni personById = ReactivePerson.findById(personId); // finding a specific person by ID via an Optional @@ -911,11 +938,11 @@ public class PanacheFunctionalityTest { Assertions.assertEquals(0, Person.count()); // Now let's specify the return value - Mockito.when(Person.count()).thenReturn(23l); + Mockito.when(Person.count()).thenReturn(23L); Assertions.assertEquals(23, Person.count()); // Now let's change the return value - Mockito.when(Person.count()).thenReturn(42l); + Mockito.when(Person.count()).thenReturn(42L); Assertions.assertEquals(42, Person.count()); // Now let's call the original method @@ -927,13 +954,13 @@ public class PanacheFunctionalityTest { // Mock only with specific parameters Person p = new Person(); - Mockito.when(Person.findById(12l)).thenReturn(p); - Assertions.assertSame(p, Person.findById(12l)); - Assertions.assertNull(Person.findById(42l)); + Mockito.when(Person.findById(12L)).thenReturn(p); + Assertions.assertSame(p, Person.findById(12L)); + Assertions.assertNull(Person.findById(42L)); // Mock throwing - Mockito.when(Person.findById(12l)).thenThrow(new WebApplicationException()); - Assertions.assertThrows(WebApplicationException.class, () -> Person.findById(12l)); + Mockito.when(Person.findById(12L)).thenThrow(new WebApplicationException()); + Assertions.assertThrows(WebApplicationException.class, () -> Person.findById(12L)); // We can even mock your custom methods Mockito.when(Person.findOrdered()).thenReturn(Collections.emptyList()); @@ -1002,11 +1029,11 @@ public class PanacheFunctionalityTest { Assertions.assertEquals(0, personRepository.count()); // Now let's specify the return value - Mockito.when(personRepository.count()).thenReturn(23l); + Mockito.when(personRepository.count()).thenReturn(23L); Assertions.assertEquals(23, personRepository.count()); // Now let's change the return value - 
Mockito.when(personRepository.count()).thenReturn(42l); + Mockito.when(personRepository.count()).thenReturn(42L); Assertions.assertEquals(42, personRepository.count()); // Now let's call the original method @@ -1018,13 +1045,13 @@ public class PanacheFunctionalityTest { // Mock only with specific parameters Person p = new Person(); - Mockito.when(personRepository.findById(12l)).thenReturn(p); - Assertions.assertSame(p, personRepository.findById(12l)); - Assertions.assertNull(personRepository.findById(42l)); + Mockito.when(personRepository.findById(12L)).thenReturn(p); + Assertions.assertSame(p, personRepository.findById(12L)); + Assertions.assertNull(personRepository.findById(42L)); // Mock throwing - Mockito.when(personRepository.findById(12l)).thenThrow(new WebApplicationException()); - Assertions.assertThrows(WebApplicationException.class, () -> personRepository.findById(12l)); + Mockito.when(personRepository.findById(12L)).thenThrow(new WebApplicationException()); + Assertions.assertThrows(WebApplicationException.class, () -> personRepository.findById(12L)); Mockito.when(personRepository.findOrdered()).thenReturn(Collections.emptyList()); Assertions.assertTrue(personRepository.findOrdered().isEmpty()); diff --git a/docs/src/main/asciidoc/mongodb.adoc b/docs/src/main/asciidoc/mongodb.adoc index 50c53ee202163..020637917b5a8 100644 --- a/docs/src/main/asciidoc/mongodb.adoc +++ b/docs/src/main/asciidoc/mongodb.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using the MongoDB Client include::./attributes.adoc[] @@ -41,18 +41,18 @@ The solution is located in the `mongodb-quickstart` {quickstarts-tree-url}/mongo First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=mongodb-quickstart \ -DclassName="org.acme.mongodb.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jsonb,mongodb-client,resteasy-mutiny,context-propagation" + -Dextensions="resteasy,resteasy-jackson,mongodb-client,resteasy-mutiny,context-propagation" cd mongodb-quickstart ---- -This command generates a Maven structure importing the RESTEasy/JAX-RS, JSON-B, Mutiny, Context Propagation and MongoDB Client extensions. +This command generates a Maven structure importing the RESTEasy/JAX-RS, Jackson, Mutiny, Context Propagation and MongoDB Client extensions. After this, the quarkus-mongodb-client extension has been added to your `pom.xml`. 
If you already have your Quarkus project configured, you can add the `mongodb-client` extension @@ -89,6 +89,7 @@ public class Fruit { private String name; private String description; + private String id; public Fruit() { } @@ -129,6 +130,14 @@ public class Fruit { public int hashCode() { return Objects.hash(this.name); } + + public void setId(String id) { + this.id = id; + } + + public String getId() { + return id; + } } ---- @@ -191,8 +200,6 @@ Now, edit the `org.acme.mongodb.FruitResource` class as follows: [source,java] ---- @Path("/fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { @Inject FruitService fruitService; @@ -260,11 +267,11 @@ multiple clients are **not** necessary as a single client is able to access all === Named Mongo client Injection -When using multiple clients, each `MongoClient`, you can select the client to inject using the `io.quarkus.mongodb.runtime.MongoClientName` qualifier. +When using multiple clients, each `MongoClient`, you can select the client to inject using the `io.quarkus.mongodb.MongoClientName` qualifier. Using the above properties to configure three different clients, you can also inject each one as follows: [source,java,indent=0] --- +---- @Inject MongoClient defaultMongoClient; @@ -275,13 +282,18 @@ MongoClient mongoClient1; @Inject @MongoClientName("inventory") ReactiveMongoClient mongoClient2; --- +---- + +[NOTE] +==== +The old `io.quarkus.mongodb.runtime.MongoClientName` has been deprecated. It still works, but will soon be removed, therefore users are encouraged to not use it. +==== == Running a MongoDB Database As by default, `MongoClient` is configured to access a local MongoDB database on port 27017 (the default MongoDB port), if you have a local running database on this port, there is nothing more to do before being able to test it! If you want to use Docker to run a MongoDB database, you can use the following command to launch one: -[source,shell] +[source,bash] ---- docker run -ti --rm -p 27017:27017 mongo:4.0 ---- @@ -559,25 +571,30 @@ a discriminator field inside the document. It can be useful when working with ab Quarkus will automatically register all the classes annotated with `@BsonDiscriminator` with the POJO codec. +The POJO Codec have enhanced generic support via `PropertyCodecProvider`, +Quarkus will automatically register any `PropertyCodecProvider` with the POJO Codec. +When building native executables and using generic types, you might need to register the type arguments for reflection. + == Simplifying MongoDB with Panache The link:mongodb-panache[MongoDB with Panache] extension facilitates the usage of MongoDB by providing active record style entities (and repositories) like you have in link:hibernate-orm-panache.html[Hibernate ORM with Panache] and focuses on making your entities trivial and fun to write in Quarkus. == Connection Health Check -If you are using the `quarkus-smallrye-health` extension, `quarkus-mongodb` will automatically add a readiness health check +If you are using the `quarkus-smallrye-health` extension, `quarkus-mongodb-client` will automatically add a readiness health check to validate the connection to the cluster. -So when you access the `/health/ready` endpoint of your application you will have information about the connection validation status. +So when you access the `/q/health/ready` endpoint of your application you will have information about the connection validation status. 
This behavior can be disabled by setting the `quarkus.mongodb.health.enabled` property to `false` in your `application.properties`. == Metrics -If you are using the `quarkus-smallrye-metrics` extension, `quarkus-mongodb` can provide metrics about the connection pools. +If you are using the `quarkus-micrometer` or `quarkus-smallrye-metrics` extension, `quarkus-mongodb-client` can provide metrics about the connection pools. This behavior must first be enabled by setting the `quarkus.mongodb.metrics.enabled` property to `true` in your `application.properties`. -So when you access the `/metrics` endpoint of your application you will have information about the connection pool status in the `vendor` scope. +So when you access the `/q/metrics` endpoint of your application you will have information about the connection pool status. +When using link:microprofile-metrics[SmallRye Metrics], connection pool metrics will be available under the `vendor` scope. == The legacy client @@ -609,7 +626,7 @@ quarkus.mongodb.tls-insecure=true # only if TLS certificate cannot be validated You can then build a native executable with the usual command `./mvnw package -Pnative`. -Running it is as simple as executing `./target/mongodb-quickstart-1.0-SNAPSHOT-runner`. +Running it is as simple as executing `./target/mongodb-quickstart-1.0.0-SNAPSHOT-runner`. You can then point your browser to `http://localhost:8080/fruits.html` and use your application. diff --git a/docs/src/main/asciidoc/native-and-ssl.adoc b/docs/src/main/asciidoc/native-and-ssl.adoc index b978b8627bf9b..352647f88bac5 100644 --- a/docs/src/main/asciidoc/native-and-ssl.adoc +++ b/docs/src/main/asciidoc/native-and-ssl.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using SSL With Native Executables @@ -20,7 +20,7 @@ To complete this guide, you need: * less than 20 minutes * an IDE -* GraalVM installed with `JAVA_HOME` and `GRAALVM_HOME` configured appropriately +* GraalVM (Java 11) installed with `JAVA_HOME` and `GRAALVM_HOME` configured appropriately * Apache Maven {maven-version} This guide is based on the REST client guide so you should get this Maven project first. @@ -32,34 +32,44 @@ The project is located in the `rest-client-quickstart` {quickstarts-tree-url}/re == Looks like it works out of the box?!? If you open the application's configuration file (`src/main/resources/application.properties`), you can see the following line: -``` + +[source,properties] +---- org.acme.restclient.CountriesService/mp-rest/url=https://restcountries.eu/rest -``` +---- which configures our REST client to connect to an SSL REST service. Now let's build the application as a native executable and run the tests: -``` + +[source,bash] +---- ./mvnw clean install -Pnative -``` +---- And we obtain the following result: -``` + +[source] +---- [INFO] ------------------------------------------------------------------------ [INFO] BUILD SUCCESS [INFO] ------------------------------------------------------------------------ -``` +---- So, yes, it appears it works out of the box and this guide is pretty useless. It's not. 
The magic happens when building the native executable: -``` -[INFO] [io.quarkus.creator.phase.nativeimage.NativeImagePhase] /opt/graalvm/bin/native-image -J-Djava.util.logging.manager=org.jboss.logmanager.LogManager -J-Dcom.sun.xml.internal.bind.v2.bytecode.ClassTailor.noOptimize=true -H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime -jar rest-client-1.0-SNAPSHOT-runner.jar -J-Djava.util.concurrent.ForkJoinPool.common.parallelism=1 -H:+PrintAnalysisCallTree -H:EnableURLProtocols=http,https --enable-all-security-services -H:-SpawnIsolates -H:+JNI --no-server -H:-UseServiceLoaderFeature -H:+StackTrace -``` + +[source] +---- +[INFO] [io.quarkus.creator.phase.nativeimage.NativeImagePhase] /opt/graalvm/bin/native-image -J-Djava.util.logging.manager=org.jboss.logmanager.LogManager -J-Dcom.sun.xml.internal.bind.v2.bytecode.ClassTailor.noOptimize=true -H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime -jar rest-client-1.0.0-SNAPSHOT-runner.jar -J-Djava.util.concurrent.ForkJoinPool.common.parallelism=1 -H:+PrintAnalysisCallTree -H:EnableURLProtocols=http,https --enable-all-security-services -H:-SpawnIsolates -H:+JNI --no-server -H:-UseServiceLoaderFeature -H:+StackTrace +---- The important elements are these 3 options: -``` + +[source,bash] +---- -H:EnableURLProtocols=http,https --enable-all-security-services -H:+JNI -``` +---- They enable the native SSL support for your native executable. @@ -67,7 +77,7 @@ As SSL is de facto the standard nowadays, we decided to enable its support autom * the Agroal connection pooling extension (`quarkus-agroal`), * the Amazon DynamoDB extension (`quarkus-amazon-dynamodb`), - * the Hibernate Search Elasticsearch extension (`quarkus-hibernate-search-elasticsearch`), + * the Hibernate Search Elasticsearch extension (`quarkus-hibernate-search-orm-elasticsearch`), * the Infinispan Client extension (`quarkus-infinispan-client`). * the Jaeger extension (`quarkus-jaeger`), * the JGit extension (`quarkus-jgit`), @@ -82,14 +92,17 @@ As SSL is de facto the standard nowadays, we decided to enable its support autom * the Reactive client for PostgreSQL extension (`quarkus-reactive-pg-client`), * the Reactive client for MySQL extension (`quarkus-reactive-mysql-client`), * the Reactive client for DB2 extension (`quarkus-reactive-db2-client`). + * the Cassandra client extensions (`cassandra-quarkus-client`) As long as you have one of those extensions in your project, the SSL support will be enabled by default. Now, let's just check the size of our native executable as it will be useful later: -``` -$ ls -lh target/rest-client-quickstart-1.0-SNAPSHOT-runner --rwxrwxr-x. 1 gandrian gandrian 46M Jun 11 13:01 target/rest-client-quickstart-1.0-SNAPSHOT-runner -``` + +[source,shell] +---- +$ ls -lh target/rest-client-quickstart-1.0.0-SNAPSHOT-runner +-rwxrwxr-x. 1 gandrian gandrian 46M Jun 11 13:01 target/rest-client-quickstart-1.0.0-SNAPSHOT-runner +---- == Let's disable SSL and see how it goes @@ -100,50 +113,66 @@ So if you are sure you don't need it, you can disable it entirely. First, let's disable it without changing the REST service URL and see how it goes. 
Open `src/main/resources/application.properties` and add the following line: -``` + +[source,properties] +---- quarkus.ssl.native=false -``` +---- And let's try to build again: -``` + +[source,bash] +---- ./mvnw clean install -Pnative -``` +---- The native executable tests will fail with the following error: -``` + +[source] +---- Exception handling request to /country/name/greece: com.oracle.svm.core.jdk.UnsupportedFeatureError: Accessing an URL protocol that was not enabled. The URL protocol https is supported but not enabled by default. It must be enabled by adding the --enable-url-protocols=https option to the native-image command. -``` +---- This error is the one you obtain when trying to use SSL while it was not explicitly enabled in your native executable. Now, let's change the REST service URL to **not** use SSL in `src/main/resources/application.properties`: -``` + +[source,properties] +---- org.acme.restclient.CountriesService/mp-rest/url=http://restcountries.eu/rest -``` +---- And build again: -``` + +[source,bash] +---- ./mvnw clean install -Pnative -``` +---- If you check carefully the native executable build options, you can see that the SSL related options are gone: -``` -[INFO] [io.quarkus.creator.phase.nativeimage.NativeImagePhase] /opt/graalvm/bin/native-image -J-Djava.util.logging.manager=org.jboss.logmanager.LogManager -J-Dcom.sun.xml.internal.bind.v2.bytecode.ClassTailor.noOptimize=true -H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime -jar rest-client-1.0-SNAPSHOT-runner.jar -J-Djava.util.concurrent.ForkJoinPool.common.parallelism=1 -H:+PrintAnalysisCallTree -H:EnableURLProtocols=http -H:-SpawnIsolates -H:+JNI --no-server -H:-UseServiceLoaderFeature -H:+StackTrace -``` + +[source] +---- +[INFO] [io.quarkus.creator.phase.nativeimage.NativeImagePhase] /opt/graalvm/bin/native-image -J-Djava.util.logging.manager=org.jboss.logmanager.LogManager -J-Dcom.sun.xml.internal.bind.v2.bytecode.ClassTailor.noOptimize=true -H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime -jar rest-client-1.0.0-SNAPSHOT-runner.jar -J-Djava.util.concurrent.ForkJoinPool.common.parallelism=1 -H:+PrintAnalysisCallTree -H:EnableURLProtocols=http -H:-SpawnIsolates -H:+JNI --no-server -H:-UseServiceLoaderFeature -H:+StackTrace +---- And we end up with: -``` + +[source] +---- [INFO] ------------------------------------------------------------------------ [INFO] BUILD SUCCESS [INFO] ------------------------------------------------------------------------ -``` +---- You remember we checked the size of the native executable with SSL enabled? Let's check again with SSL support entirely disabled: -``` -$ ls -lh target/rest-client-quickstart-1.0-SNAPSHOT-runner --rwxrwxr-x. 1 gandrian gandrian 35M Jun 11 13:06 target/rest-client-quickstart-1.0-SNAPSHOT-runner -``` + +[source,shell] +---- +$ ls -lh target/rest-client-quickstart-1.0.0-SNAPSHOT-runner +-rwxrwxr-x. 1 gandrian gandrian 35M Jun 11 13:06 target/rest-client-quickstart-1.0.0-SNAPSHOT-runner +---- Yes, it is now **35 MB** whereas it used to be **46 MB**. SSL comes with a 11 MB overhead in native executable size. @@ -152,14 +181,18 @@ And there's more to it. 
== Let's start again with a clean slate Let's revert the changes we made to the configuration file and go back to SSL with the following command: -``` + +[source,bash] +---- git checkout -- src/main/resources/application.properties -``` +---- And let's build the native executable again: -``` + +[source,bash] +---- ./mvnw clean install -Pnative -``` +---- [#the-truststore-path] == The TrustStore path @@ -179,9 +212,10 @@ As a consequence, system properties such as `javax.net.ssl.trustStore` do not ha run time, so when the defaults need to be changed, these system properties must be provided at image build time. The easiest way to do so is by setting `quarkus.native.additional-build-args`. For example: -``` +[source,bash] +---- quarkus.native.additional-build-args=-J-Djavax.net.ssl.trustStore=/tmp/mycerts,-J-Djavax.net.ssl.trustStorePassword=changeit -``` +---- will ensure that the certificates of `/tmp/mycerts` are baked into the native binary and used *in addition* to the default cacerts. diff --git a/docs/src/main/asciidoc/neo4j.adoc b/docs/src/main/asciidoc/neo4j.adoc index bfe88c56d9521..d5546cedfc8c6 100644 --- a/docs/src/main/asciidoc/neo4j.adoc +++ b/docs/src/main/asciidoc/neo4j.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Neo4j :neo4j_version: 4.0.0 @@ -99,7 +99,7 @@ To complete this guide, you need: The easiest way to start a Neo4j instance is a locally installed Docker environment. -[source,shell,subs="verbatim,attributes"] +[source,bash,subs="verbatim,attributes"] ---- docker run --publish=7474:7474 --publish=7687:7687 -e 'NEO4J_AUTH=neo4j/secret' neo4j:{neo4j_version} ---- @@ -122,12 +122,13 @@ It contains a very simple UI to use the JAX-RS resources created here, too. First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=neo4j-quickstart \ - -Dextensions="neo4j,resteasy-jsonb" + -DclassName="org.acme.datasource.GreetingResource" \ + -Dextensions="resteasy,neo4j,resteasy-jackson" cd neo4j-quickstart ---- @@ -141,10 +142,10 @@ It generates: * an associated test The Neo4j extension has been added already to your `pom.xml`. -In addition, we added `resteasy-jsonb`, which allows us to expose `Fruit` instances over HTTP in the JSON format via JAX-RS resources. +In addition, we added `resteasy-jackson`, which allows us to expose `Fruit` instances over HTTP in the JSON format via JAX-RS resources. 
If you have an already created project, the `neo4j` extension can be added to an existing Quarkus project with the `add-extension` command: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="neo4j" ---- @@ -163,7 +164,7 @@ Otherwise, you can manually add this to the dependencies section of your `pom.xm The Neo4j driver can be configured with standard Quarkus properties: -[source] +[source,properties] .src/main/resources/application.properties ---- quarkus.neo4j.uri = bolt://localhost:7687 @@ -200,16 +201,11 @@ Add a `FruitResource` skeleton like this and `@Inject` a `org.neo4j.driver.Drive package org.acme.neo4j; import javax.inject.Inject; -import javax.ws.rs.Consumes; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; import org.neo4j.driver.Driver; @Path("fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { @Inject @@ -247,14 +243,14 @@ public CompletionStage get() { Now start Quarkus in `dev` mode with: -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- and retrieve the endpoint like this -[source, shell] +[source,bash] ---- curl localhost:8080/fruits ---- @@ -291,7 +287,7 @@ The node is returned, a `Fruit` entity created and then mapped to a `201` create A curl request against this path may look like this: -[source, shell] +[source,bash] ---- curl -v -X "POST" "http://localhost:8080/fruits" \ -H 'Content-Type: application/json; charset=utf-8' \ @@ -340,7 +336,7 @@ public CompletionStage getSingle(@PathParam("id") Long id) { A request may look like this: -[source, shell] +[source,bash] ---- curl localhost:8080/fruits/42 ---- @@ -374,7 +370,7 @@ public CompletionStage delete(@PathParam("id") Long id) { A request may look like this -[source, shell] +[source,bash] ---- curl -X DELETE localhost:8080/fruits/42 ---- @@ -389,7 +385,7 @@ The Cypher manual linked in the introduction will help you with modelling your q === Packaging Packaging your application is as simple as `./mvnw clean package`. -It can be run with `java -jar target/neo4j-quickstart-1.0-SNAPSHOT-runner.jar`. +It can be run with `java -jar target/quarkus-app/quarkus-run.jar`. With GraalVM installed, you can also create a native executable binary: `./mvnw clean package -Dnative`. Depending on your system, that will take some time. @@ -399,10 +395,15 @@ Depending on your system, that will take some time. If you are using the `quarkus-smallrye-health` extension, `quarkus-neo4j` will automatically add a readiness health check to validate the connection to Neo4j. -So when you access the `/health/ready` endpoint of your application you will have information about the connection validation status. +So when you access the `/q/health/ready` endpoint of your application you will have information about the connection validation status. This behavior can be disabled by setting the `quarkus.neo4j.health.enabled` property to `false` in your `application.properties`. +=== Driver metrics + +If you are using a metrics extension and specify the config property `quarkus.neo4j.pool.metrics-enabled=true`, the Neo4j extension will +expose several metrics related to the Neo4j connection pool. + === Explore Cypher and the Graph There are tons of options to model your domain within a Graph. 
diff --git a/docs/src/main/asciidoc/openapi-swaggerui.adoc b/docs/src/main/asciidoc/openapi-swaggerui.adoc index 72cdca1dbc81b..0b973fc5d1cb1 100644 --- a/docs/src/main/asciidoc/openapi-swaggerui.adoc +++ b/docs/src/main/asciidoc/openapi-swaggerui.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using OpenAPI and Swagger UI @@ -37,14 +37,14 @@ The solution is located in the `openapi-swaggerui-quickstart` {quickstarts-tree- First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=openapi-swaggerui-quickstart \ -DclassName="org.acme.openapi.swaggerui.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jsonb" + -Dextensions="resteasy,resteasy-jackson" cd openapi-swaggerui-quickstart ---- @@ -82,16 +82,11 @@ import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.DELETE; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.Consumes; -import javax.ws.rs.core.MediaType; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Set; @Path("/fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { private Set fruits = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>())); @@ -183,7 +178,7 @@ specification in order to generate your API https://github.com/OAI/OpenAPI-Speci You just need to add the `openapi` extension to your Quarkus application: -[source, shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="quarkus-smallrye-openapi" ---- @@ -200,16 +195,16 @@ This will add the following to your `pom.xml`: Now, we are ready to run our application: -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- -Once your application is started, you can make a request to the default `/openapi` endpoint: +Once your application is started, you can make a request to the default `/q/openapi` endpoint: -[source, shell] +[source,shell] ---- -$ curl http://localhost:8080/openapi +$ curl http://localhost:8080/q/openapi openapi: 3.0.3 info: title: Generated API @@ -256,7 +251,7 @@ components: [NOTE] ==== -If you do not like the default endpoint location `/openapi`, you can change it by adding the following configuration to your `application.properties`: +If you do not like the default endpoint location `/q/openapi`, you can change it by adding the following configuration to your `application.properties`: [source, properties] ---- quarkus.smallrye-openapi.path=/swagger @@ -296,7 +291,7 @@ empty class can then be annotated with various OpenAPI annotations such as `@Ope email = "techsupport@example.com"), license = @License( name = "Apache 2.0", - url = "http://www.apache.org/licenses/LICENSE-2.0.html")) + url = "https://www.apache.org/licenses/LICENSE-2.0.html")) ) public class ExampleApiApplication extends Application { } @@ -319,7 +314,7 @@ mp.openapi.extensions.smallrye.info.contact.email=techsupport@example.com mp.openapi.extensions.smallrye.info.contact.name=Example API Support mp.openapi.extensions.smallrye.info.contact.url=http://exampleurl.com/contact mp.openapi.extensions.smallrye.info.license.name=Apache 2.0 
-mp.openapi.extensions.smallrye.info.license.url=http://www.apache.org/licenses/LICENSE-2.0.html +mp.openapi.extensions.smallrye.info.license.url=https://www.apache.org/licenses/LICENSE-2.0.html ---- This will give you similar information as the `@OpenAPIDefinition` example above. @@ -333,7 +328,7 @@ An OpenAPI document that conforms to the OpenAPI Specification is itself a vali To see this in action, we'll put OpenAPI documentation under `META-INF/openapi.yaml` for our `/fruits` endpoints. Quarkus also supports alternative <> if you prefer. -[source, shell] +[source,yaml] ---- openapi: 3.0.1 info: @@ -342,7 +337,7 @@ info: version: "1.0" servers: - - url: http://localhost:8080/openapi + - url: http://localhost:8080/q/openapi description: Optional dev mode server description paths: @@ -384,10 +379,10 @@ components: name: type: string ---- -By default, a request to `/openapi` will serve the combined OpenAPI document from the static file and the model generated from application endpoints code. +By default, a request to `/q/openapi` will serve the combined OpenAPI document from the static file and the model generated from application endpoints code. We can however change this to only serve the static OpenAPI document by adding `mp.openapi.scan.disable=true` configuration into `application.properties`. -Now, a request to `/openapi` endpoint will serve the static OpenAPI document instead of the generated one. +Now, a request to `/q/openapi` endpoint will serve the static OpenAPI document instead of the generated one. [[open-document-paths]] [TIP] @@ -416,7 +411,7 @@ will be used. You can also define the version in SmallRye using the following co mp.openapi.extensions.smallrye.openapi=3.0.2 ---- -This might be useful if your API go through a Gateway that needs a certain version. +This might be useful if your API goes through a Gateway that needs a certain version. == Auto-generation of Operation Id @@ -466,23 +461,24 @@ This is a build time property, it cannot be changed at runtime after your applic ==== -By default, Swagger UI is accessible at `/swagger-ui`. +By default, Swagger UI is accessible at `/q/swagger-ui`. -You can update this path by setting the `quarkus.swagger-ui.path` property in your `application.properties`: +You can update the `/swagger-ui` sub path by setting the `quarkus.swagger-ui.path` property in your `application.properties`: [source, properties] ---- -quarkus.swagger-ui.path=/my-custom-path +quarkus.swagger-ui.path=my-custom-path ---- [WARNING] ==== The value `/` is not allowed as it blocks the application from serving anything else. +A value prefixed with '/' makes it absolute and not relative. ==== Now, we are ready to run our application: -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -491,10 +487,10 @@ You can check the Swagger UI path in your application's log: [source] ---- -00:00:00,000 INFO [io.qua.swa.run.SwaggerUiServletExtension] Swagger UI available at /swagger-ui +00:00:00,000 INFO [io.qua.swa.run.SwaggerUiServletExtension] Swagger UI available at /q/swagger-ui ---- -Once your application is started, you can go to http://localhost:8080/swagger-ui and play with your API. +Once your application is started, you can go to http://localhost:8080/q/swagger-ui and play with your API. You can visualize your API's operations and schemas. 
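If you also want Swagger UI included in a production build, a minimal `application.properties` sketch (assuming the build-time `quarkus.swagger-ui.always-include` option from the Swagger UI configuration reference, combined with the custom sub path shown above) could look like this:

[source,properties]
----
# build-time settings: include Swagger UI in all profiles and serve it under /q/my-custom-path
quarkus.swagger-ui.always-include=true
quarkus.swagger-ui.path=my-custom-path
----

The screenshot below shows the UI once it is reachable.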
image:openapi-swaggerui-guide-screenshot01.png[alt=Visualize your API] diff --git a/docs/src/main/asciidoc/opentracing.adoc b/docs/src/main/asciidoc/opentracing.adoc index da30a960035ef..9195e6f6fe991 100644 --- a/docs/src/main/asciidoc/opentracing.adoc +++ b/docs/src/main/asciidoc/opentracing.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using OpenTracing @@ -39,14 +39,14 @@ The solution is located in the `opentracing-quickstart` {quickstarts-tree-url}/o First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=opentracing-quickstart \ -DclassName="org.acme.opentracing.TracedResource" \ -Dpath="/hello" \ - -Dextensions="quarkus-smallrye-opentracing" + -Dextensions="resteasy,quarkus-smallrye-opentracing" cd opentracing-quickstart ---- @@ -108,12 +108,12 @@ There are two ways to configure the Jaeger tracer within the application. The first approach is by providing the properties within the `src/main/resources/application.properties` file: -[source,shell] +[source,properties] ---- quarkus.jaeger.service-name=myservice // <1> quarkus.jaeger.sampler-type=const // <2> quarkus.jaeger.sampler-param=1 // <3> -quarkus.log.console.format=%d{HH:mm:ss} %-5p traceId=%X{traceId}, spanId=%X{spanId}, sampled=%X{sampled} [%c{2.}] (%t) %s%e%n // <4> +quarkus.log.console.format=%d{HH:mm:ss} %-5p traceId=%X{traceId}, parentId=%X{parentId}, spanId=%X{spanId}, sampled=%X{sampled} [%c{2.}] (%t) %s%e%n // <4> ---- <1> If the `quarkus.jaeger.service-name` property (or `JAEGER_SERVICE_NAME` environment variable) is not provided then a "no-op" tracer will be configured, resulting in no tracing data being reported to the backend. @@ -127,41 +127,71 @@ The second approach is to supply the properties as https://www.jaegertracing.io/ The first step is to start the tracing system to collect and display the captured traces: -[source, text] +[source,bash] ---- docker run -p 5775:5775/udp -p 6831:6831/udp -p 6832:6832/udp -p 5778:5778 -p 16686:16686 -p 14268:14268 jaegertracing/all-in-one:latest ---- Now we are ready to run our application. 
If using `application.properties` to configure the tracer: -[source, text] +[source,bash] ---- ./mvnw compile quarkus:dev ---- or if configuring the tracer via environment variables: -[source, text] +[source,bash] ---- ./mvnw compile quarkus:dev -Djvm.args="-DJAEGER_SERVICE_NAME=myservice -DJAEGER_SAMPLER_TYPE=const -DJAEGER_SAMPLER_PARAM=1" ---- Once both the application and tracing system are started, you can make a request to the provided endpoint: -``` +[source,shell] +---- $ curl http://localhost:8080/hello hello -``` +---- When the first request has been submitted, the Jaeger tracer within the app will be initialized: -``` + +[source] +---- 2019-10-16 09:35:23,464 INFO [io.jae.Configuration] (executor-thread-1) Initialized tracer=JaegerTracer(version=Java-0.34.0, serviceName=myservice, reporter=RemoteReporter(sender=UdpSender(), closeEnqueueTimeout=1000), sampler=ConstSampler(decision=true, tags={sampler.type=const, sampler.param=true}), tags={hostname=localhost.localdomain, jaeger.version=Java-0.34.0, ip=127.0.0.1}, zipkinSharedRpcSpan=false, expandExceptionLogs=false, useTraceId128Bit=false) -13:20:11 INFO traceId=1336b2b0a76a96a3, spanId=1336b2b0a76a96a3, sampled=true [or.ac.qu.TracedResource] (executor-thread-63) hello -``` +13:20:11 INFO traceId=1336b2b0a76a96a3, parentId=0, spanId=1336b2b0a76a96a3, sampled=true [or.ac.qu.TracedResource] (executor-thread-63) hello +---- Then visit the http://localhost:16686[Jaeger UI] to see the tracing information. Hit `CTRL+C` to stop the application. +== Tracing additional methods + +REST endpoints are automatically traced. +If you need to trace additional methods, you can use the `org.eclipse.microprofile.opentracing.Traced` annotation at class or method level. + +This can be useful to trace incoming requests from non-REST calls (like request coming from a message) or to create spans inside a trace. + +Here is an example of a `FrancophoneService` which methods are traced. + +[source, java] +---- +import javax.enterprise.context.ApplicationScoped; + +import org.eclipse.microprofile.opentracing.Traced; + +@Traced +@ApplicationScoped +public class FrancophoneService { + + public String bonjour() { + return "bonjour"; + } +} +---- + +NOTE: the best way to add OpenTracing capability to reactive messaging based applications is by adding the `Traced` annotation to all incoming methods. + == Additional instrumentation The https://github.com/opentracing-contrib[OpenTracing API Contributions project] offers additional instrumentation that can be used to add tracing to a large variety of technologies/components. @@ -193,8 +223,40 @@ quarkus.datasource.jdbc.driver=io.opentracing.contrib.jdbc.TracingDriver quarkus.hibernate-orm.dialect=org.hibernate.dialect.PostgreSQLDialect ---- + +=== Kafka + +The https://github.com/opentracing-contrib/java-kafka-client[Kafka instrumentation] will add a span for each message sent to or received from a Kafka topic. To enable it, add the following dependency to your pom.xml: + +[source, xml] +---- + + io.opentracing.contrib + opentracing-kafka-client + +---- + +It contains OpenTracing interceptors that must be registered on Kafka producers and consumers. 
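For reference, the same interceptor can also be registered on a manually created Kafka producer through the standard `interceptor.classes` producer setting. The following is only a sketch (the `TracedProducerFactory` class, broker address and serializers are illustrative and not part of the guide):

[source,java]
----
package org.acme.tracing;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import io.opentracing.contrib.kafka.TracingProducerInterceptor;

public class TracedProducerFactory {

    public static Producer<String, Integer> createTracedProducer() {
        Properties props = new Properties();
        // broker address and serializers are only illustrative
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
        // registers the OpenTracing producer interceptor shipped with opentracing-kafka-client
        props.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, TracingProducerInterceptor.class.getName());
        return new KafkaProducer<>(props);
    }
}
----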
+ +If you followed the link:kafka[Kafka guide], the interceptors can be added on the `generated-price` and the `prices` channels as follows: + +[source, properties] +---- +# Configure the Kafka sink (we write to it) +mp.messaging.outgoing.generated-price.connector=smallrye-kafka +mp.messaging.outgoing.generated-price.topic=prices +mp.messaging.outgoing.generated-price.value.serializer=org.apache.kafka.common.serialization.IntegerSerializer +mp.messaging.outgoing.generated-price.interceptor.classes=io.opentracing.contrib.kafka.TracingProducerInterceptor + +# Configure the Kafka source (we read from it) +mp.messaging.incoming.prices.connector=smallrye-kafka +mp.messaging.incoming.prices.value.deserializer=org.apache.kafka.common.serialization.IntegerDeserializer +mp.messaging.incoming.prices.interceptor.classes=io.opentracing.contrib.kafka.TracingConsumerInterceptor +---- + +NOTE: `interceptor.classes` accept a list of classes separated by a comma. + [[configuration-reference]] == Jaeger Configuration Reference include::{generated-dir}/config/quarkus-jaeger.adoc[leveloffset=+1, opts=optional] - diff --git a/docs/src/main/asciidoc/optaplanner.adoc b/docs/src/main/asciidoc/optaplanner.adoc index 22f600404956c..1ef8e6ecabad2 100644 --- a/docs/src/main/asciidoc/optaplanner.adoc +++ b/docs/src/main/asciidoc/optaplanner.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = OptaPlanner - Using AI to optimize a schedule with OptaPlanner @@ -65,12 +65,13 @@ with the following extensions, for Maven or Gradle: Alternatively, generate it from the command line with Maven: -[source,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=optaplanner-quickstart \ - -Dextensions="resteasy, resteasy-jackson, optaplanner-quarkus, optaplanner-quarkus-jackson" + -Dextensions="resteasy,resteasy-jackson,optaplanner-quarkus,optaplanner-quarkus-jackson" \ + -DnoExamples cd optaplanner-quickstart ---- @@ -450,9 +451,7 @@ public class TimeTableConstraintProvider implements ConstraintProvider { // ... in the same timeslot ... Joiners.equal(Lesson::getTimeslot), // ... in the same room ... - Joiners.equal(Lesson::getRoom), - // ... and the pair is unique (different id, no reverse pairs) - Joiners.lessThan(Lesson::getId)) + Joiners.equal(Lesson::getRoom)) // then penalize each pair with a hard weight. 
.penalize("Room conflict", HardSoftScore.ONE_HARD); } @@ -504,7 +503,7 @@ import java.util.List; import org.optaplanner.core.api.domain.solution.PlanningEntityCollectionProperty; import org.optaplanner.core.api.domain.solution.PlanningScore; import org.optaplanner.core.api.domain.solution.PlanningSolution; -import org.optaplanner.core.api.domain.solution.drools.ProblemFactCollectionProperty; +import org.optaplanner.core.api.domain.solution.ProblemFactCollectionProperty; import org.optaplanner.core.api.domain.valuerange.ValueRangeProvider; import org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore; @@ -612,19 +611,14 @@ package org.acme.optaplanner.rest; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.inject.Inject; -import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; import org.acme.optaplanner.domain.TimeTable; import org.optaplanner.core.api.solver.SolverJob; import org.optaplanner.core.api.solver.SolverManager; @Path("/timeTable") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class TimeTableResource { @Inject @@ -859,9 +853,9 @@ You have just developed a Quarkus application with https://www.optaplanner.org/[ Now try adding database and UI integration: -. Store `Timeslot`, `Room`, and `Lesson` in the database with https://quarkus.io/guides/hibernate-orm-panache[Hibernate and Panache]. +. Store `Timeslot`, `Room`, and `Lesson` in the database with link:hibernate-orm-panache[Hibernate and Panache]. -. https://quarkus.io/guides/rest-json[Expose them through REST]. +. link:rest-json[Expose them through REST]. . Adjust the `TimeTableResource` to read and write a `TimeTable` in a single transaction and use those accordingly: @@ -885,6 +879,7 @@ import org.acme.optaplanner.domain.Room; import org.acme.optaplanner.domain.TimeTable; import org.acme.optaplanner.domain.Timeslot; import org.optaplanner.core.api.score.ScoreManager; +import org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore; import org.optaplanner.core.api.solver.SolverManager; import org.optaplanner.core.api.solver.SolverStatus; @@ -898,7 +893,7 @@ public class TimeTableResource { @Inject SolverManager solverManager; @Inject - ScoreManager scoreManager; + ScoreManager scoreManager; // To try, open http://localhost:8080/timeTable @GET diff --git a/docs/src/main/asciidoc/performance-measure.adoc b/docs/src/main/asciidoc/performance-measure.adoc index 319a66d0d6a10..5c43ca028e8c6 100644 --- a/docs/src/main/asciidoc/performance-measure.adoc +++ b/docs/src/main/asciidoc/performance-measure.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Measuring Performance @@ -60,15 +60,15 @@ Linux:: The linux https://linux.die.net/man/1/pmap[pmap] and https://linux.die.net/man/1/ps[ps] tools provide a report on the native memory map for a process [source,shell] --- +---- $ ps -o pid,rss,command -p PID RSS COMMAND - 11229 12628 ./target/getting-started-1.0-SNAPSHOT-runner --- + 11229 12628 ./target/getting-started-1.0.0-SNAPSHOT-runner +---- [source,shell] --- +---- $ pmap -x 13150: /data/quarkus-application -Xmx100m -Xmn70m @@ -84,7 +84,7 @@ Linux:: ... 
---------------- ------- ------- ------- total kB 9726508 256092 220900 --- +---- Each Memory region that has been allocated for the process is listed; @@ -101,12 +101,12 @@ macOS:: On macOS, you can use `ps x -o pid,rss,command -p ` which list the RSS for a given process in KB (1024 bytes). [source,shell] --- +---- $ ps x -o pid,rss,command -p 57160 PID RSS COMMAND 57160 288548 /Applications/IntelliJ IDEA CE.app/Contents/jdk/Contents/Home/jre/bin/java --- +---- Which means IntelliJ IDEA consumes 281,8 MB of resident memory. @@ -121,7 +121,7 @@ Here is how we measure startup time in our tests. We create a sample application that logs timestamps for certain points in the application lifecycle. [source, java] --- +---- @Path("/") public class GreetingEndpoint { @@ -129,7 +129,7 @@ public class GreetingEndpoint { @GET @Path("/greeting") - @Produces("application/json") + @Produces(MediaType.APPLICATION_JSON) public Greeting greeting(@QueryParam("name") String name) { System.out.println(new SimpleDateFormat("HH:mm:ss.SSS").format(new java.util.Date(System.currentTimeMillis()))); String suffix = name != null ? name : "World"; @@ -140,27 +140,27 @@ public class GreetingEndpoint { System.out.println(new SimpleDateFormat("HH:mm:ss.SSS").format(new Date())); } } --- +---- We start looping in a shell, sending requests to the rest endpoint of the sample application we are testing. -[source, shell] --- +[source,shell] +---- $ while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' localhost:8080/api/greeting)" != "200" ]]; do sleep .00001; done --- +---- In a separate terminal, we start the timing application that we are testing, printing the time the application starts -[source, shell] --- +[source,shell] +---- $ date +"%T.%3N" && ./target/quarkus-timing-runner 10:57:32.508 10:57:32.512 2019-04-05 10:57:32,512 INFO [io.quarkus] (main) Quarkus 0.11.0 started in 0.002s. Listening on: http://127.0.0.1:8080 -2019-04-05 10:57:32,512 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jsonb] +2019-04-05 10:57:32,512 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jackson] 10:57:32.537 --- +---- The difference between the final timestamp and the first timestamp is the total startup time for the application to serve the first request. diff --git a/docs/src/main/asciidoc/picocli.adoc b/docs/src/main/asciidoc/picocli.adoc index cfd1c2dadff09..8e2138d65f85e 100644 --- a/docs/src/main/asciidoc/picocli.adoc +++ b/docs/src/main/asciidoc/picocli.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Command Mode with Picocli :extension-status: experimental diff --git a/docs/src/main/asciidoc/platform.adoc b/docs/src/main/asciidoc/platform.adoc new file mode 100644 index 0000000000000..084344e1ab804 --- /dev/null +++ b/docs/src/main/asciidoc/platform.adoc @@ -0,0 +1,191 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Platform + +include::./attributes.adoc[] + +The Quarkus extension ecosystem consists of the Quarkus extensions developed and maintained by the community, including the Quarkus core development team. 
While the Quarkus ecosystem (sometimes also referred to as the "Quarkus universe") includes all the Quarkus extensions ever developed, there is also a concept of a Quarkus platform. + +== Quarkus Platform + +The fundamental promise of a Quarkus platform is any combination of the Quarkus extensions the platform consists of can be used in the same application without causing any conflict for each other. +Each organization creating their Quarkus platform may establish their own criterions for the extensions to be accepted into the platform and the means to guarantee the compatibility between the accepted extensions. + +=== Quarkus Platform Artifacts + +Each Quarkus platform is defined with a few artifacts. + +=== Quarkus Platform BOM + +Each Quarkus Platform is expected to provide a Maven BOM artifact that + +* imports a chosen version of `io.quarkus:quarkus-bom` (the platform BOM may be flattened at the end but it has to be based on some version of `io.quarkus:quarkus-bom`) +* includes all the Quarkus extension artifacts (the runtime and the deployment ones) the platform consists of +* includes all the necessary third-party artifacts that align the transitive dependency versions of the platform extensions to guarantee compatibility between them +* includes the <> artifact +* possibly includes the <> artifacts + +Quarkus applications that want to include extensions from a Quarkus platform will be importing the Quarkus platform BOM. + +[[platform-descriptor]] +=== Quarkus Platform Descriptor + +Quarkus platform descriptor is a JSON artifact that provides information about the platform and its extensions to the Quarkus tools. E.g. http://code.quarkus.io and the Quarkus command line tools consult this descriptor to list, add and remove extensions to/from the project on user's request. +This artifact is also used as a Quarkus platform identifier. When Quarkus tools need to identify the Quarkus platform(s) used in the project, they will analyze the dependency version constraints of the project (the effective list of the managed dependencies from the `dependencyManagement` section in Maven terms) looking for the platform descriptor artifact(s) among them. Given that the platform descriptors are included into the Quarkus platform BOMs, every Quarkus application will inherit the platform descriptor artifact from the imported platform BOM(s) as a dependency version constraint (managed dependency in Maven terms). + +To be able to easily identify Quarkus platform descriptors among the project's dependency constraints, the platform descriptor Maven artifact coordinates should follow the following naming convention: + +* the `groupId` of the descriptor artifact should match the `groupId` of the corresponding Quarkus Platform BOM; +* the `artifactId` of the descriptor artifact should be the `artifactId` of the corresponding Quarkus Platform BOM with the `-quarkus-platform-descriptor` suffix; +* the `classifier` of the descriptor artifact should match the `version` of the corresponding Quarkus Platform BOM; +* the `type` of the descriptor artifact should be `json`; +* the `version` of the descriptor artifact should match the `version` of the corresponding Quarkus Platform BOM. + +As a string it will look like `:-quarkus-platform-descriptor::json:` + +E.g. the coordinates of the descriptor for Quarkus BOM `io.quarkus:quarkus-bom::pom:1.2.3` will be `io.quarkus:quarkus-bom-quarkus-platform-descriptor:1.2.3:json:1.2.3`. 
+And for a custom Quarkus platform defined with BOM `org.acme:acme-bom::pom:555` it will be `org.acme:acme-bom-quarkus-platform-descriptor:555:json:555`. + +The classifier matching the version of the platform may look confusing at first. But this is what turns the descriptor into a true "fingerprint" of the platform. In both Maven and Gradle, the effective set of the dependency version constraints (or the managed dependencies) is obtained by merging all the imported BOMs and version constraints specified individually in the current project and also its parent(s). The artifact `classifier` is a part of the dependency ID, which could be expressed as `groupId:artifactId:classifier:type`. Which means that if a project is based on a couple of Quarkus platforms (by importing their BOMs), e.g. `io.quarkus:quarkus-universe-bom::pom:1.2.3` and `org.acme:acme-bom::pom:555`, and these two platforms are based on different `io.quarkus:quarkus-bom::pom` versions (which is technically a Quarkus platform in its turn as well), the Quarkus tools will be able to detect this fact and make the user aware of it, since it *might* not be a safe combination. If the descriptor artifact didn't include the classifer containing the version of the platform then the tools wouldn't be able to detect a potentially incompatible mix of different versions of the same platform in the same project. + +The platform descriptor will normally be generated using a Maven plugin, e.g. + +[source,xml] +---- + + io.quarkus + quarkus-platform-descriptor-json-plugin + ${quarkus.version} <1> + + + process-resources + + generate-extensions-json <2> + + + + + ${quarkus.platform.group-id} <3> + ${quarkus.platform.artifact-id} <4> + ${quarkus.platform.version} <5> + ${overridesfile} <6> + true <7> + + +---- + +<1> the version of the `quarkus-platform-descriptor-json-plugin` +<2> `generate-extensions-json` is the goal generating the platform descriptor +<3> the `groupId` of the platform BOM +<4> the `artifactId` of the platform BOM +<5> the `version` of the platform BOM +<6> this parameter is optional, it allows to override some metadata from the Quarkus extension descriptors found in every runtime extension artifact from which the platform descriptor is generated +<7> this parameter is also optional and defaults to false. It has to be set to true in case the platform BOM *is not generated* and *is not flattened*. Which for example is the case for `io.quarkus:quarkus-bom`. + +[[platform-properties]] +=== Quarkus Platform Properties + +A Quarkus platform may provide its own default values for some of the configuration options. + +Quarkus is using https://github.com/eclipse/microprofile-config[Eclipse MicroProfile Config] for wiring application configuration. A Quarkus platform may be used as another source of configuration in the hierarchy of the configuration sources dominated by the application's `application.properties`. 
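As a quick illustration of that ordering, a value set in the application's own `application.properties` always takes precedence over a platform-supplied default. This sketch reuses the `quarkus.native.builder-image` option discussed below; the image name is purely hypothetical:

[source,properties]
----
# application.properties of the application:
# overrides whatever default the platform properties artifact may provide
quarkus.native.builder-image=quay.io/acme/custom-builder-image:latest
----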
+ +To provide platform-specific defaults, the platform needs to include a dependency version constraint in its BOM for a properties artifact whose coordinates follow the following naming convention: + +* the `groupId` of the properties artifact should match the `groupId` of the corresponding Quarkus Platform BOM; +* the `artifactId` of the properties artifact should be the `artifactId` of the corresponding Quarkus Platform BOM with the `-quarkus-platform-properties` suffix; +* the `classifier` of the descriptor artifact should be left empty/null; +* the `type` of the descriptor artifact should be `properties`; +* the `version` of the descriptor artifact should match the `version` of the corresponding Quarkus Platform BOM. + +The properties artifact itself is expected to be a traditional `properties` file that will be loaded into an instance of `java.util.Properties` class. + +IMPORTANT: At this point, platform properties are only allowed to provide the default values for a restricted set of configuration options. The property names in the platform properties file must be prefixed with the `platform.` suffix. + +Extension developers that want to make their configuration options platform-specific should set their default values to properties that start with the `platform.` suffix. Here is an example: + +[source,java] +---- +package io.quarkus.deployment.pkg; + +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public class NativeConfig { + + /** + * The docker image to use to do the image build + */ + @ConfigItem(defaultValue = "${platform.quarkus.native.builder-image}") + public String builderImage; +} +---- + +In this case the default value for `quarkus.native.builder-image` will be provided by the platform. The user will still be able to set the desired value for `quarkus.native.builder-image` in its `application.properties`, of course. But in case it's not customized by the user, the default value will be coming from the platform properties. +A platform properties file for the example above would contain (the actual value is provided as an example): + +[source,text] +---- +platform.quarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-native-image:20.2.0-java11 +---- + +There is also a Maven plugin goal that validates the platform properties content and its artifact coordinates and also checks whether the platform properties artifact is present in the platform's BOM. Here is a sample plugin configuration: + +[source,xml] +---- + + io.quarkus + quarkus-platform-descriptor-json-plugin + ${quarkus.version} + + + process-resources + + platform-properties + + + + +---- + +==== Merging Quarkus Platform Properties + +In case an application is importing more than one Quarkus platform and those platforms include their own platform properties artifacts, the content of those platform properties artifacts will be merged to form a single set of properties that will be used for the application build. +The order in which the properties artifacts are merged will correspond to the order in which they appear in the list of dependency version constraints of the application (in the Maven terms that will correspond to the effective list of application's managed dependencies, i.e. the flattened `managedDependencies` POM section). + +IMPORTANT: The content of the properties artifacts found earlier will dominate over those found later among the application's dependency constraints! 
+ +That means if a platform needs to override a certain property value defined in the platform it is based on, it will need to include its platform properties artifact into the `managedDependencies` section of its BOM before importing the base platform. + +For example, the `quarkus-universe-bom` platform is based on the `quarkus-bom` platform. In case, the `quarkus-universe-bom` platform were to override certain properties defined in `quarkus-bom-quarkus-platform-properties` included into the `quarkus-bom` platform, the `quarkus-universe-bom` would have to be composed as +[source,xml] +---- + + + quarkus-universe-bom + Quarkus universe - BOM + pom + + + + + + io.quarkus + quarkus-universe-bom-quarkus-platform-properties + properties + ${project.version} + + + + + io.quarkus + quarkus-bom + ${quarkus.version} + pom + import + + + +---- + +That way, the `quarkus-universe-bom` platform properties will appear before the `quarkus-bom` platform properties and so will be dominating during merging. \ No newline at end of file diff --git a/docs/src/main/asciidoc/quartz.adoc b/docs/src/main/asciidoc/quartz.adoc index fb0306846abfd..dc4a5a101e455 100644 --- a/docs/src/main/asciidoc/quartz.adoc +++ b/docs/src/main/asciidoc/quartz.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Scheduling Periodic Tasks with Quartz @@ -42,14 +42,14 @@ The solution is located in the `quartz-quickstart` {quickstarts-tree-url}/quartz First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=quartz-quickstart \ -DclassName="org.acme.quartz.TaskResource" \ -Dpath="/tasks" \ - -Dextensions="quartz, hibernate-orm-panache, flyway, resteasy-jsonb, jdbc-postgresql" + -Dextensions="resteasy,quartz,hibernate-orm-panache,flyway,resteasy-jackson,jdbc-postgresql" cd quartz-quickstart ---- @@ -82,6 +82,11 @@ This will add the following to your `pom.xml`: ---- +[TIP] +==== +To use a JDBC store, the `quarkus-agroal` extension, which provides the datasource support, is also required. +==== + == Creating the Task Entity In the `org.acme.quartz` package, create the `Task` class, with the following content: @@ -110,7 +115,7 @@ public class Task extends PanacheEntity { <1> } } ---- -1. Declare the entity using https://quarkus.io/guides/hibernate-orm-panache[Panache] +1. Declare the entity using link:hibernate-orm-panache[Panache] == Creating a scheduled job @@ -130,7 +135,7 @@ import io.quarkus.scheduler.Scheduled; public class TaskBean { @Transactional - @Scheduled(every = "10s") <2> + @Scheduled(every = "10s", identity = "task-job") <2> void schedule() { Task task = new Task(); <3> task.persist(); <4> @@ -138,9 +143,9 @@ public class TaskBean { } ---- 1. Declare the bean in the _application_ scope -2. Use the `@Scheduled` annotation to instruct Quarkus to run this method every 10 seconds. +2. Use the `@Scheduled` annotation to instruct Quarkus to run this method every 10 seconds and set the unique identifier for this job. 3. Create a new `Task` with the current start time. -4. Persist the task in database using https://quarkus.io/guides/hibernate-orm-panache[Panache]. +4. 
Persist the task in database using link:hibernate-orm-panache[Panache]. === Scheduling Jobs Programmatically @@ -157,7 +162,7 @@ public class TaskBean { @Inject org.quartz.Scheduler quartz; <1> - void onStart(@Observes StartupEvent event) { + void onStart(@Observes StartupEvent event) throws SchedulerException { JobDetail job = JobBuilder.newJob(MyJob.class) .withIdentity("myJob", "myGroup") .build(); @@ -180,9 +185,12 @@ public class TaskBean { // A new instance of MyJob is created by Quartz for every job execution public static class MyJob implements Job { + + @Inject + TaskBean taskBean; public void execute(JobExecutionContext context) throws JobExecutionException { - Arc.container().instance(TaskBean.class).get(). performTask(); <3> + taskBean.performTask(); <3> } } @@ -190,14 +198,14 @@ public class TaskBean { ---- 1. Inject the underlying `org.quartz.Scheduler` instance. 2. Schedule a new job using the Quartz API. -3. Lookup the bean instance of `TaskBean` and invoke the `performTask()` method from the job. +3. Invoke the `TaskBean#performTask()` method from the job. Jobs are also link:cdi[container-managed] beans if they belong to a link:cdi-reference[bean archive]. NOTE: By default, the scheduler is not started unless a `@Scheduled` business method is found. You may need to force the start of the scheduler for "pure" programmatic scheduling. See also <>. == Updating the application configuration file Edit the `application.properties` file and add the below configuration: -[source,shell] +[source,properties] ---- # Quartz configuration quarkus.quartz.clustered=true <1> @@ -242,7 +250,6 @@ import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; @Path("/tasks") -@Produces(MediaType.APPLICATION_JSON) public class TaskResource { @GET @@ -346,7 +353,7 @@ services: - tasks-network postgres: <3> - image: postgres:11.3 + image: postgres:13.1 container_name: quarkus_test environment: - POSTGRES_USER=quarkus_test @@ -370,7 +377,7 @@ networks: In a separate terminal, run the below command: -[source,shell] +[source,bash] ---- docker-compose up postgres <1> ---- @@ -382,14 +389,14 @@ docker-compose up postgres <1> Run the application with: `./mvnw quarkus:dev`. After a few seconds, open another terminal and run `curl localhost:8080/tasks` to verify that we have at least one task created. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. == Packaging the application and run several instances The application can be packaged using `./mvnw clean package`. Once the build is successful, run the below command: -[source,shell] +[source,bash] ---- docker-compose up --scale tasks=2 --scale nginx=1 <1> ---- @@ -400,6 +407,33 @@ After a few seconds, in another terminal, run `curl localhost:8080/tasks` to ver You can also generate the native executable with `./mvnw clean package -Pnative`. +WARNING: It's the reponsibility of the deployer to clear/remove the previous state, i.e. stale jobs and triggers. Moreover, the applications that form the "Quartz cluster" should be identical, otherwise an unpredictable result may occur. + +[[quartz-register-plugin-listeners]] +== Registering Plugin and Listeners + +You can register `plugins`, `job-listeners` and `trigger-listeners` through Quarkus configuration. 
+ +The example below registers the plugin `org.quartz.plugins.history.LoggingJobHistoryPlugin` named as `jobHistory` with the property `jobSuccessMessage` defined as `Job [{1}.{0}] execution complete and reports: {8}` + +[source,conf] +---- +quarkus.quartz.plugins.jobHistory.class=org.quartz.plugins.history.LoggingJobHistoryPlugin +quarkus.quartz.plugins.jobHistory.properties.jobSuccessMessage=Job [{1}.{0}] execution complete and reports: {8} +---- + +You can also register a listener programmatically with an injected `org.quartz.Scheduler`: + +[source,java] +---- +public class MyListenerManager { + void onStart(@Observes StartupEvent event, org.quartz.Scheduler scheduler) throws SchedulerException { + scheduler.getListenerManager().addJobListener(new MyJogListener()); + scheduler.getListenerManager().addTriggerListener(new MyTriggerListener()); + } +} +---- + [[quartz-configuration-reference]] == Quartz Configuration Reference diff --git a/docs/src/main/asciidoc/qute-reference.adoc b/docs/src/main/asciidoc/qute-reference.adoc index a411ab7ebd7a7..ec15b22043500 100644 --- a/docs/src/main/asciidoc/qute-reference.adoc +++ b/docs/src/main/asciidoc/qute-reference.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Qute Reference Guide @@ -11,10 +11,15 @@ include::./attributes.adoc[] :sectnums: :sectnumlevels: 4 :toc: -:extension-status: experimental -include::./status-include.adoc[] +Qute is a templating engine designed specifically to meet the Quarkus needs. +The usage of reflection is minimized to reduce the size of native images. +The API combines both the imperative and the non-blocking reactive style of coding. +In the development mode, all files located in the `src/main/resources/templates` folder are watched for changes and modifications are immediately visible in your application. +Furthermore, Qute attempts to detect most of the template problems at build time and fail fast. +In this guide, you will find an <>, the description of the <> and <> details. +[[hello_world_example]] == Hello World Example In this example, we'd like to demonstrate the basic workflow when working with Qute templates. @@ -44,7 +49,7 @@ Engine engine = Engine.builder().addDefaults().build(); TIP: In Quarkus, there is a preconfigured `Engine` available for injection - see <>. -If we have an `Engine` instance we could parse the template contents: +Once we have an `Engine` instance we could parse the template contents: [source,java] ---- @@ -58,9 +63,10 @@ Finally, we will create a *template instance*, set the data and render the outpu [source,java] ---- // Renders

<p>Hello Jim!</p>
    -helloTemplate.data("name", "Jim").render(); <1> +helloTemplate.data("name", "Jim").render(); <1> <2> ---- <1> `Template.data(String, Object)` is a convenient method that creates a template instance and sets the data in one step. +<2> `TemplateInstance.render()` triggers a synchronous rendering, i.e. the current thread is blocked until the rendering is finished. However, there are also asynchronous ways to trigger the rendering and consume the results. For example there is the `TemplateInstance.renderAsync()` method that returns `CompletionStage` or `TemplateInstance.createMulti()` that returns Mutiny's `Multi`. So the workflow is simple: @@ -69,65 +75,85 @@ So the workflow is simple: 3. Create template instance (`io.quarkus.qute.TemplateInstance`), 4. Render output. -The `Engine` is able to cache the definitions so that it's not necessary to parse the contents again and again. - -NOTE: In Quarkus, the caching is done automatically. +TIP: The `Engine` is able to cache the definitions so that it's not necessary to parse the contents again and again. In Quarkus, the caching is done automatically. +[[core_features]] == Core Features -=== Syntax and Building Blocks - -The dynamic parts of a template include: - -* *Comment* -** Starts with `{!` and ends with `!}`: `{! This is a comment !}`, -** Could be multi-line, -** May contain expressions and sections: `{! {#if true} !}`. -* *Expression* -** Outputs the evaluated value, -** Simple properties: `{foo}`, `{item.name}`, -** Virtual methods: `{item.get(name)}`, `{name ?: 'John'}`, -** With namespace: `{inject:colors}`. -* *Section* -** May contain expressions and sections: `{#if foo}{foo.name}{/if}`, -** The name in the closing tag is optional: `{#if active}ACTIVE!{/}`, -** Can be empty: `{#myTag image=true /}`, -** May declare nested section blocks: `{#if item.valid} Valid. {#else} Invalid. {/if}` and decide which block to render. -* *Unparsed Character Data* -** Starts with `{[` and ends with `]}`: `{[ ]}`, -** Could be multi-line, -** Used to mark the content that should be rendered but not parsed. - -=== Identifiers - -Expressions/tags must start with a curly bracket (`{`) followed by a valid identifier. -A valid identifier is a digit, an alphabet character, underscore (`_`), or a section command (`#`). -Expressions/tags starting with an invalid identifier are ignored. -A closing curly bracket (`}`) is ignored if not inside an expression/tag. +=== Basic Building Blocks + +The dynamic parts of a template include comments, expressions, sections and unparsed character data. + +==== Comments +A comment starts with `{!` and ends with `!}`, e.g. `{! This is a comment !}`. +It could be multi-line and may contain expressions and sections: `{! {#if true} !}`. +Of course, the content of a comment is completely ignored. + +[[blocks_expressions]] +==== Expressions +An <> outputs an evaluated value. +It consists of one or more parts. +A part may represent simple properties: `{foo}`, `{item.name}` or virtual methods: `{item.get(name)}`, `{name ?: 'John'}`. +An expression may start with a namespace: `{inject:colors}`. + +[[blocks_sections]] +==== Sections +A <> may contain text, expressions and nested sections: `{#if foo}{foo.name}{/if}`. +The name in the closing tag is optional: `{#if active}ACTIVE!{/}`. +It can be empty: `{#myTag image=true /}`. +A section may declare nested section blocks: `{#if item.valid} Valid. {#else} Invalid. {/if}` and decide which block to render. 
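To see how these building blocks combine, here is a small template sketch (the `item` data is an illustrative placeholder):

[source,html]
----
{! This comment is dropped from the rendered output !}
<h1>{item.name ?: 'Unknown item'}</h1>
{#if item.active}
  <p>{item.description}</p>
{#else}
  <p>This item is not active.</p>
{/if}
----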
+ +==== Unparsed Character Data +It is used to mark the content that should be rendered but not parsed. +It starts with `{|` and ends with `|}`: `{| |}`, and could be multi-line. + +NOTE: Previously, unparsed character data had to start with `{[` and end with `]}`. This syntax is still supported but we encourage users to switch to the new syntax to avoid some common collisions with constructs from other languages. + +[[identifiers]] +=== Identifiers and Tags + +Identifiers are used in expressions and section tags. +A valid identifier is a sequence of non-whitespace characters. +However, users are encouraged to only use valid Java identifiers in expressions. + +TIP: You can use bracket notation if you need to specify an identifier that contains a dot, e.g. `{map['my.key']}`. + +When parsing a template document the parser identifies all _tags_. +A tag starts and ends with a curly bracket, e.g. `{foo}`. +The content of a tag must start with: + +* a digit, or +* an alphabet character, or +* underscore, or +* a built-in command: `#`, `!`, `@`, `/`. + +If it does not start with any of the above it is ignored by the parser. -.hello.html +.Tag Examples [source,html] ---- - {_foo} <1> - { foo} <2> - {{foo}} <3> - {"foo":true} <4> + {_foo.bar} <1> + {! comment !}<2> + { foo} <3> + {{foo}} <4> + {"foo":true} <5> ---- -<1> Evaluated: expression starts with underscore. -<2> Ignored: expression starts with whitespace. -<3> Ignored: expression starts with `{`. -<4> Ignored: expression starts with `"`. +<1> Parsed: an expression that starts with underscore. +<2> Parsed: a comment +<3> Ignored: starts with whitespace. +<4> Ignored: starts with `{`. +<5> Ignored: starts with `"`. -TIP: It is also possible to use escape sequences `\{` and `\}` to insert delimiters in the text. In fact, an escape sequence is usually only needed for the start delimiter, ie. `\\{foo}` will be rendered as `{foo}` (no evaluation will happen). +TIP: It is also possible to use escape sequences `\{` and `\}` to insert delimiters in the text. In fact, an escape sequence is usually only needed for the start delimiter, ie. `\\{foo}` will be rendered as `{foo}` (no parsing/evaluation will happen). === Removing Standalone Lines From the Template -By default, Qute parser removes standalone lines from the template output. -A *standalone line* is a line that contains at least one section tag (e.g. `{#each}` and `{/each}`) or parameter declaration (e.g. `{@org.acme.Foo foo}`) but no expression and non-whitespace character. +By default, the parser removes standalone lines from the template output. +A *standalone line* is a line that contains at least one section tag (e.g. `{#each}` and `{/each}`), parameter declaration (e.g. `{@org.acme.Foo foo}`) or comment but no expression and no non-whitespace character. In other words, a line that contains no section tag or a parameter declaration is *not* a standalone line. Likewise, a line that contains an _expression_ or a _non-whitespace character_ is *not* a standalone line. @@ -163,7 +189,7 @@ Likewise, a line that contains an _expression_ or a _non-whitespace character_ i ---- -The default behavior can be disabled by setting the property `quarkus.qute.remove-standalone-lines` to `false`. +TIP: In Quarkus, the default behavior can be disabled by setting the property `quarkus.qute.remove-standalone-lines` to `false`. In this case, all whitespace characters from a standalone line will be printed to the output. 
.Output with `quarkus.qute.remove-standalone-lines=false` @@ -181,66 +207,83 @@ In this case, all whitespace characters from a standalone line will be printed t ---- -==== Expressions - -An expression consists of: - -* an optional namespace followed by a colon (`:`), -* one or more parts separated by dot (`.`). - -The first part of the expression is always resolved against the <>. -If no result is found for the first part it's resolved against the parent context object (if available). -For an expression that starts with a namespace the current context object is found using all the available ``NamespaceResolver``s. -For an expression that does not start with a namespace the current context object is *derived from the position* of the tag. -All other parts are resolved using ``ValueResolver``s against the result of the previous resolution. +[[expressions]] +=== Expressions -For example, expression `{name}` has no namespace and single part - `name`. -The "name" will be resolved using all available value resolvers against the current context object. -However, the expression `{global:colors}` has the namespace `global` and single part - `colors`. -First, all available ``NamespaceResolver``s will be used to find the current context object. -And afterwards value resolvers will be used to resolve "colors" against the context object found. +An expression outputs a value. +It consists of one or more parts separated by dot (dot notation) or square brackets (bracket notation). +In the `object.property` (dot notation) syntax, the `property` must be a <>. +In the `object[property_name]` (bracket notation) syntax, the `property_name` has to be a non-null <> value. +An expression could start with an optional namespace followed by a colon (`:`). +.Expressions Example [source] ---- {name} <1> {item.name} <2> -{global:colors} <3> +{item['name']} <3> +{global:colors} <4> ---- -<1> no namespace, one part -`name` -<2> no namespace, two parts - `item`, `name` -<3> namespace `global`, one part - `colors` +<1> no namespace, one part: `name` +<2> no namespace, two parts: `item`, `name` +<3> equivalent to `{item.name}` but using the bracket notation +<4> namespace `global`, one part: `colors` -An expression part could be a "virtual method" in which case the name can be followed by a list of comma-separated parameters in parentheses: +A part of an expression could be a _virtual method_ in which case the name can be followed by a list of comma-separated parameters in parentheses. +A parameter of a virtual method can be either a nested expression or a <> value. +We call it "virtual" because it does not have to be backed by a real Java method. +.Virtual Methods Example [source] ---- {item.getLabels(1)} <1> {name or 'John'} <2> ---- <1> no namespace, two parts - `item`, `getLabels(1)`, the second part is a virtual method with name `getLabels` and params `1` -<2> infix notation, translated to `name.or('John')`; no namespace, two parts - `name`, `or('John')` +<2> infix notation that can be used for virtual methods with single parameter, translated to `name.or('John')`; no namespace, two parts - `name`, `or('John')` -[[current_context_object]] -===== Current Context +[[literals]] +==== Supported Literals -If an expression does not specify a namespace the current context object is derived from the position of the tag. -By default, the current context object represents the data passed to the template instance. -However, sections may change the current context object. 
-A typical example is the for/each loop - during iteration the content of the section is rendered with each element as the current context object: +|=== +|Literal |Examples -[source] ----- -{#each items} - {count}. {it.name} <1> -{/each} +|boolean +|`true`, `false` -{! Another form of iteration... !} -{#for item in items} - {count}. {item.name} <2> -{/for} ----- -<1> `it` is an implicit alias. `name` is resolved against the current iteration element. -<2> Loop with an explicit alias `item`. +|null +|`null` + +|string +|`'value'`, `"string"` + +|integer +|`1`, `-5` + +|long +|`1l`, `-5L` + +|double +|`1D`, `-5d` + +|float +|`1f`, `-5F` + +|=== + +==== Resolution + +The first part of the expression is always resolved against the <>. +If no result is found for the first part it's resolved against the parent context object (if available). +For an expression that starts with a namespace the current context object is found using all the available ``NamespaceResolver``s. +For an expression that does not start with a namespace the current context object is *derived from the position* of the tag. +All other parts of an expression are resolved using all ``ValueResolver``s against the result of the previous resolution. + +For example, expression `{name}` has no namespace and single part - `name`. +The "name" will be resolved using all available value resolvers against the current context object. +However, the expression `{global:colors}` has the namespace `global` and single part - `colors`. +First, all available ``NamespaceResolver``s will be used to find the current context object. +And afterwards value resolvers will be used to resolve "colors" against the context object found. [TIP] ==== @@ -269,27 +312,96 @@ This could be useful to access data for which the key is overridden: ==== -===== Built-in Operators +[[current_context_object]] +==== Current Context + +If an expression does not specify a namespace the _current context object_ is derived from the position of the tag. +By default, the current context object represents the data passed to the template instance. +However, sections may change the current context object. +A typical example is the `with` section that could be used to set the current context object in order to simplify the template structure: + +[source,html] +---- +{#with item} +

  <h1>{name}</h1> <1>
+  <p>{description}</p>
    +{/with} +---- +<1> `name` is resolved against the `item`. + +Another built-in section that modifies the current context object is `let`/`set`: +[source,html] +---- +{#let myParent=order.item.parent myPrice=order.price} <1> +

  <h1>{myParent.name}</h1>
+  <p>Price: {myPrice}</p>
    +{/let} +---- +<1> The current context object inside the section is the map of resolved parameters. + +NOTE: The current context can be accessed via the implicit binding `this`. + +==== Built-in Resolvers |=== -|Operator |Description |Examples +|Name |Description |Examples -|Elvis +|Elvis Operator |Outputs the default value if the previous part cannot be resolved or resolves to `null`. -|`{person.name ?: 'John'}`, `{person.name or 'John'}` +|`{person.name ?: 'John'}`, `{person.name or 'John'}`, `{person.name.or('John')}` + +|orEmpty +|Outputs an empty list if the previous part cannot be resolved or resolves to `null`. +|`{#for pet in pets.orEmpty}{pet.name}{/for}` -|Ternary +|Ternary Operator |Shorthand for if-then-else statement. Unlike in <> nested operators are not supported. |`{item.isActive ? item.name : 'Inactive item'}` outputs the value of `item.name` if `item.isActive` resolves to `true`. + +|Logical AND Operator +|Outputs `true` if both parts are not `falsy` as described in the <>. The parameter is only evaluated if needed. +|`{person.isActive && person.hasStyle}` + +|Logical OR Operator +|Outputs `true` if any of the parts is not `falsy` as described in the <>. The parameter is only evaluated if needed. +|`{person.isActive \|\| person.hasStyle}` + |=== TIP: The condition in a ternary operator evaluates to `true` if the value is not considered `falsy` as described in the <>. -NOTE: In fact, the operators are implemented as "virtual methods" that consume one parameter and can be used with infix notation, i.e. `{person.name or 'John'}` is translated to `{person.name.or('John')}`. +NOTE: In fact, the operators are implemented as "virtual methods" that consume one parameter and can be used with infix notation. For example `{person.name or 'John'}` is translated to `{person.name.or('John')}` and `{item.isActive ? item.name : 'Inactive item'}` is translated to `{item.isActive.ifTruthy(item.name).or('Inactive item')}` + +==== Arrays + +You can iterate over elements of an array with the <>. Moreover, it's also possible to get the length of the specified array and access the elements directly via an index value. + +.Array Examples +[source,html] +---- +

<h1>Array of length: {myArray.length}</h1> <1>
+<ul>
+  <li>First: {myArray.0}</li> <2>
+  <li>Second: {myArray[1]}</li> <3>
+  <li>Third: {myArray.get(2)}</li> <4>
+</ul>
+<ol>
+ {#for element in myArray}
+  <li>{element}</li>
+ {/for}
+</ol>
    +---- +<1> Outputs the length of the array. +<2> Outputs the first element of the array. +<3> Outputs the second element of the array using the bracket notation. +<4> Outputs the third element of the array via the virtual method `get()`. + +==== Character Escapes + +For HTML and XML templates the `'`, `"`, `<`, `>`, `&` characters are escaped by default if a template variant is set. -===== Character Escapes +NOTE: In Quarkus, a variant is set automatically for templates located in the `src/main/resources/templates`. By default, the `java.net.URLConnection#getFileNameMap()` is used to determine the content type of a template file. The additional map of suffixes to content types can be set via `quarkus.qute.content-types`. -For HTML and XML templates the `'`, `"`, `<`, `>`, `&` characters are escaped by default. If you need to render the unescaped value: 1. Use the `raw` or `safe` properties implemented as extension methods of the `java.lang.Object`, @@ -305,7 +417,7 @@ If you need to render the unescaped value: <1> `title` that resolves to `Expressions & Escapes` will be rendered as `Expressions &amp; Escapes` <2> `paragraph` that resolves to `

<p>My text!</p>` will be rendered as `&lt;p&gt;My text!&lt;/p&gt;
    ` -===== Virtual Methods +==== Virtual Methods A virtual method is a *part of an expression* that looks like a regular Java method invocation. It's called "virtual" because it does not have to match the actual method of a Java class. @@ -330,7 +442,7 @@ class Item { } ---- -NOTE: Virtual methods are usually evaluated by value resolvers generated for <>, <> or classes used in <>. However, a custom value resolver that is not backed by any Java class/method can be registered as well. +NOTE: Virtual methods are usually evaluated by value resolvers generated for <>, <> or classes used in <>. However, a custom value resolver that is not backed by any Java class/method can be registered as well. A virtual method with single parameter can be called using the infix notation: @@ -354,9 +466,15 @@ Virtual method parameters can be "nested" virtual method invocations. ---- <1> `item.calculateDiscount(10)` is evaluated first and then passed as an argument to `item.subtractPrice()`. +==== Evaluation of `CompletionStage` and `Uni` Objects +Objects that implement `java.util.concurrent.CompletionStage` and `io.smallrye.mutiny.Uni` are evaluated in a special way. +If a part of an expression resolves to a `CompletionStage`, the resolution continues once this stage is completed and the next part of the expression (if any) is evaluated against the result of the completed stage. +For example, if there is an expression `{foo.size}` and `foo` resolves to `CompletionStage>` then `size` is resolved against the completed result, i.e. `List`. +If a part of an expression resolves to a `Uni`, a `CompletionStage` is first created from `Uni` using `Uni#subscribeAsCompletionStage()` and then evaluated as described above. -==== Sections +[[sections]] +=== Sections A section: @@ -385,7 +503,8 @@ A section helper that defines the logic of a section can "execute" any of the bl {/if} ---- -===== Loop Section +[[loop_section]] +==== Loop Section The loop section makes it possible to iterate over an instance of `Iterable`, `Map` 's entry set, `Stream` and an Integer. It has two flavors. @@ -435,7 +554,7 @@ The output will be: ---- [[if_section]] -===== If Section +==== If Section The `if` section represents a basic control flow section. The simplest possible version accepts a single parameter and renders the content if the condition is evaluated to `true`. @@ -535,7 +654,99 @@ You can also add any number of `else` blocks: {/if} ---- -===== With Section +[[when_section]] +==== When/Switch Section + +This section is similar to Java's `switch` or Kotlin's `when` constructs. +It matches a _tested value_ against all blocks sequentially until a condition is satisfied. +The first matching block is executed. +All other blocks are ignored (this behavior differs to the Java `switch` where a `break` statement is necessary). + +.Example using the `when`/`is` name aliases +[source] +---- +{#when items.size} + {#is 1} <1> + There is exactly one item! + {#is > 10} <2> + There are more than 10 items! + {#else} <3> + There are 2 -10 items! +{/when} +---- +<1> If there is exactly one parameter it's tested for equality. +<2> It's possible to use <> to specify the matching logic. Unlike in the <> nested operators are not supported. +<3> `else` is block is executed if no other block matches the value. + +.Example using the `switch`/`case` name aliases +[source] +---- +{#switch person.name} + {#case 'John'} <1> + Hey John! + {#case 'Mary'} + Hey Mary! +{/switch} +---- +<1> `case` is an alias for `is`. 
+ +A tested value that resolves to an enum is handled specifically. +The parameters of an `is`/`case` block are not evaluated as expressions but compared with the result of a `toString()` invocation upon the tested value. + +[source] +---- +{#when machine.status} + {#is ON} + It's running. <1> + {#is in OFF BROKEN} + It's broken or OFF. <2> +{/when} +---- +<1> This block is executed if `machine.status.toString().equals("ON")`. +<2> This block is executed if `machine.status.toString().equals("OFF")` or `machine.status.toString().equals("BROKEN")`. + +NOTE: An enum constant is validated if the tested value has type information available and resolves to an enum type. + +The following operators are supported in `is`/`case` block conditions: + +[[when_operators]] + +|=== +|Operator |Aliases |Example + +|not equal +|`!=`, `not`, `ne` +|`{#is not 10}`, `{#case != 10}` + +|greater than +|`gt`, `>` +|`{#case gt 10}` + +|greater than or equal to +|`ge`, `>=` +|`{#is >= 10}` + +|less than +|`lt`, `<` +|`{#is < 10}` + +|less than or equal to +|`le`, `\<=` +|`{#case le 10}` + +|in +|`in` +|`{#is in 'foo' 'bar' 'baz'}` + +|not in +|`ni`, `!in` +|`{#is !in 1 2 3}` + +|=== + + +[[with_section]] +==== With Section This section can be used to set the current context object. This could be useful to simplify the template structure: @@ -564,15 +775,18 @@ This section might also come in handy when we'd like to avoid multiple expensive ---- <1> `this` is the result of `item.callExpensiveLogicToGetTheValue(1,'foo',bazinga)`. The method is only invoked once even though the result may be used in multiple expressions. -===== Let/Set Section +==== Let/Set Section This section allows you to define named local variables: [source,html] ---- -{#let myParent=order.item.parent} +{#let myParent=order.item.parent isActive=false age=10} <1>

    {myParent.name}

    + Is active: {isActive} + Age: {age} {/let} ---- +<1> The local variable is initialized with an expression that can also represent a <>. The section tag is also registered under the `set` alias: @@ -586,9 +800,27 @@ The section tag is also registered under the `set` alias: [[include_helper]] -===== Include/Insert Sections +==== Include Section + +This section can be used to include another template and possibly override some parts of the template (template inheritance). + +.Simple Example +[source,html] +---- + + + +Simple Include + + + {#include foo limit=10 /} <1><2> + + +---- +<1> Include a template with id `foo`. The included template can reference data from the current context. +<2> It's also possible to define optional parameters that can be used in the included template. -These sections can be used to include another template and possibly override some parts of the template (template inheritance). +Template inheritance makes it possible to reuse template layouts. .Template "base" [source,html] ---- @@ -624,7 +856,7 @@ These sections can be used to include another template and possibly override som NOTE: Section blocks can also define an optional end tag - `{/title}`. [[user_tags]] -===== User-defined Tags +==== User-defined Tags User-defined tags can be used to include a template and optionally pass some parameters. Let's suppose we have a template called `itemDetail.html`: @@ -668,8 +900,66 @@ We can include the tag like this: <1> `item` is resolved to an iteration element and can be referenced using the `it` key in the tag template. <2> Tag content injected using the `nested-content` key in the tag template. +=== Rendering Output + +`TemplateInstance` provides several ways to trigger the rendering and consume the result. +The most straightforward approach is represented by `TemplateInstance.render()`. +This method triggers a synchronous rendering, i.e. the current thread is blocked until the rendering is finished, and returns the output. +By contrast, `TemplateInstance.renderAsync()` returns a `CompletionStage` which is completed when the rendering is finished. + +.`TemplateInstance.renderAsync()` Example +[source,java] +---- +template.data(foo).renderAsync().whenComplete((result, failure) -> { <1> + if (failure == null) { + // consume the output... + } else { + // process failure... + } +}); +---- +<1> Register a callback that is executed once the rendering is finished. + +There are also two methods that return https://smallrye.io/smallrye-mutiny/[Mutiny] types. +`TemplateInstance.createUni()` returns a new `Uni` object. +If you call `createUni()` the template is not rendered right away. +Instead, every time `Uni.subscribe()` is called a new rendering of the template is triggered. + +.`TemplateInstance.createUni()` Example +[source,java] +---- +template.data(foo).createUni().subscribe().with(System.out::println); +---- + +`TemplateInstance.createMulti()` returns a new `Multi` object. +Each item represents a part/chunk of the rendered template. +Again, `createMulti()` does not trigger rendering. +Instead, every time a computation is triggered by a subscriber the template is rendered again. + +.`TemplateInstance.createMulti()` Example +[source,java] +---- +template.data(foo).createMulti().subscribe().with(buffer::append, buffer::flush); +---- + +NOTE: The template rendering is divided into two phases. During the first phase, which is asynchronous, all expressions in the template are resolved and a _result tree_ is built.
In the second phase, which is synchronous, the result tree is _materialized_, i.e. one by one the result nodes emit chunks that are consumed/buffered by the specific consumer. + === Engine Configuration +==== Value Resolvers + +Value resolvers are used when evaluating expressions. +A custom `io.quarkus.qute.ValueResolver` can be registered programmatically via `EngineBuilder.addValueResolver()`. + +.`ValueResolver` Builder Example +[source,java] +---- +engineBuilder.addValueResolver(ValueResolver.builder() + .appliesTo(ctx -> ctx.getBase() instanceof Long && ctx.getName().equals("tenTimes")) + .resolveSync(ctx -> (Long) ctx.getBase() * 10) + .build()); +---- + ==== Template Locator Manual registration is sometimes handy but it's also possible to register a template locator using `EngineBuilder.addLocator(Function>)`. @@ -677,6 +967,22 @@ This locator is used whenever the `Engine.getTemplate()` method is called and th NOTE: In Quarkus, all templates from the `src/main/resources/templates` are located automatically. +==== Content Filters + +Content filters can be used to modify the template contents before parsing. + +.Content Filter Example +[source,java] +---- +engineBuilder.addParserHook(new ParserHook() { + @Override + public void beforeParsing(ParserHelper parserHelper) { + parserHelper.addContentFilter(contents -> contents.replace("${", "$\\{")); <1> + } +}); +---- +<1> Escape all occurrences of `${`. + [[quarkus_integration]] == Quarkus Integration @@ -697,24 +1003,42 @@ Moreover, all templates located in the `src/main/resources/templates` directory ---- import io.quarkus.qute.Engine; import io.quarkus.qute.Template; -import io.quarkus.qute.api.ResourcePath; +import io.quarkus.qute.Location; class MyBean { @Inject Template items; <1> - @ResourcePath("detail/items2_v1.html") <2> + @Location("detail/items2_v1.html") <2> Template items2; @Inject Engine engine; <3> } ---- -<1> If there is no `ResourcePath` qualifier provided, the field name is used to locate the template. In this particular case, the container will attempt to locate a template with path `src/main/resources/templates/items.html`. -<2> The `ResourcePath` qualifier instructs the container to inject a template from a path relative from `src/main/resources/templates`. In this case, the full path is `src/main/resources/templates/detail/items2_v1.html`. +<1> If there is no `Location` qualifier provided, the field name is used to locate the template. In this particular case, the container will attempt to locate a template with path `src/main/resources/templates/items.html`. +<2> The `Location` qualifier instructs the container to inject a template from a path relative to `src/main/resources/templates`. In this case, the full path is `src/main/resources/templates/detail/items2_v1.html`. <3> Inject the configured `Engine` instance. +It's also possible to contribute to the engine configuration via a CDI observer method. + +.`EngineBuilder` Observer Example +[source,java] +---- +import io.quarkus.qute.EngineBuilder; + +class MyBean { + + void configureEngine(@Observes EngineBuilder builder) { + builder.addValueResolver(ValueResolver.builder() + .appliesTo(ctx -> ctx.getBase() instanceof Long && ctx.getName().equals("tenTimes")) + .resolveSync(ctx -> (Long) ctx.getBase() * 10) + .build()); + } +} +---- + === Template Variants Sometimes it's useful to render a specific variant of the template based on the content negotiation.
@@ -753,13 +1077,19 @@ For the expression `inject:foo.price` the implementation class of the injected b NOTE: A `ValueResolver` is also generated for all beans annotated with `@Named` so that it's possible to access their properties without reflection. -[[parameter_declarations]] -=== Parameter Declarations +[[typesafe_expressions]] +=== Type-safe Expressions + +Template expressions can be optionally type-safe. +This means that an expression is validated against the existing Java types and template extension methods. +If an invalid/incorrect expression is found then the build fails. + +For example, if there is an expression `item.name` where `item` maps to `org.acme.Item` then `Item` must have a property `name` or a matching template extension method must exist. -It is possible to specify optional parameter declarations in a template. -Quarkus attempts to validate all expressions that reference such parameters. -If an invalid/incorrect expression is found the build fails. +An optional _parameter declaration_ is used to bind a Java type to expressions whose first part matches the parameter name. +Parameter declarations are specified directly in a template. +.Parameter Declaration Example [source,html] ---- {@org.acme.Foo foo} <1>

    {title}

    <2> - Hello {foo.message}! <3> + Hello {foo.message.toLowerCase}! <3> <4> ---- <1> Parameter declaration - maps `foo` to `org.acme.Foo`. <2> Not validated - not matching a param declaration. <3> This expression is validated. `org.acme.Foo` must have a property `message` or a matching template extension method must exist. +<4> Likewise, the Java type of the object resolved from `foo.message` must have a property `toLowerCase` or a matching template extension method must exist. -NOTE: A value resolver is also generated for all types used in parameter declarations so that it's possible to access its properties without reflection. +IMPORTANT: A value resolver is automatically generated for all types used in parameter declarations so that it's possible to access its properties without reflection. -==== Overriding Parameter Declarations +TIP: Method parameters of <> are automatically turned into parameter declarations. + +Note that sections can override names that would otherwise match a parameter declaration: [source,html] ---- @@ -802,27 +1135,123 @@ NOTE: A value resolver is also generated for all types used in parameter declara ---- <1> Validated against `org.acme.Foo`. <2> Not validated - `foo` is overridden in the loop section. + +[[typesafe_templates]] +=== Type-safe Templates +You can also declare your templates in your Java code. +If using <>, you can rely on the following convention: + +- Organise your template files in the `/src/main/resources/templates` directory, by grouping them into one directory per resource class. So, if + your `ItemResource` class references two templates `hello` and `goodbye`, place them at `/src/main/resources/templates/ItemResource/hello.txt` + and `/src/main/resources/templates/ItemResource/goodbye.txt`. Grouping templates per resource class makes it easier to navigate to them. +- In each of your resource class, declare a `@CheckedTemplate static class Template {}` class within your resource class. +- Declare one `public static native TemplateInstance method();` per template file for your resource. +- Use those static methods to build your template instances. + +.ItemResource.java +[source,java] +---- +package org.acme.quarkus.sample; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import io.quarkus.qute.TemplateInstance; +import io.quarkus.qute.Template; +import io.quarkus.qute.CheckedTemplate; + +@Path("item") +public class ItemResource { + + @CheckedTemplate + public static class Templates { + public static native TemplateInstance item(Item item); <1> <2> + } + + @GET + @Path("{id}") + @Produces(MediaType.TEXT_HTML) + public TemplateInstance get(@PathParam("id") Integer id) { + return Templates.item(service.findItem(id)); <3> + } +} +---- +<1> Declare a method that gives us a `TemplateInstance` for `templates/ItemResource/item.html` and declare its `Item item` parameter so we can validate the template. +<2> The `item` parameter is automatically turned into a <> and so all expressions that reference this name will be validated. +<3> Make the `Item` object accessible in the template. 
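+For illustration, the `Item` referenced above could be as simple as the following POJO. This is a minimal sketch; the exact fields are assumptions based on the expressions used in this guide.
+
+[source,java]
+----
+package org.acme.quarkus.sample;
+
+import java.math.BigDecimal;
+
+public class Item {
+
+    // Public fields and getters are accessible in templates; e.g. {item.name} in
+    // templates/ItemResource/item.html is validated against this property.
+    public String name;
+
+    public BigDecimal price;
+}
+----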
+ +You can also declare a top-level Java class annotated with `@CheckedTemplate`: + +.Top-level checked templates +[source,java] +---- +package org.acme.quarkus.sample; + +import io.quarkus.qute.TemplateInstance; +import io.quarkus.qute.Template; +import io.quarkus.qute.CheckedTemplate; + +@CheckedTemplate +public class Templates { + public static native TemplateInstance hello(String name); <1> +} +---- +<1> This declares a template with path `templates/hello.txt`. The `name` parameter is automatically turned into a <> and so all expressions that reference this name will be validated. + +Then declare one `public static native TemplateInstance method();` per template file. +Use those static methods to build your template instances: + +.HelloResource.java +[source,java] +---- +package org.acme.quarkus.sample; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import io.quarkus.qute.TemplateInstance; + +@Path("hello") +public class HelloResource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public TemplateInstance get(@QueryParam("name") String name) { + return Templates.hello(name); + } +} +---- [[template_extension_methods]] === Template Extension Methods -Extension methods can be used to extend the data classes with new functionality. -For example, it is possible to add "computed properties" and "virtual methods". +Extension methods can be used to extend the data classes with new functionality (to extend the set of accessible properties and methods) or to resolve expressions for a specific <>. +For example, it is possible to add _computed properties_ and _virtual methods_. + A value resolver is automatically generated for a method annotated with `@TemplateExtension`. -If declared on a class a value resolver is generated for every non-private method declared on the class. +If a class is annotated with `@TemplateExtension` then a value resolver is generated for every _non-private static method_ declared on the class. +Method-level annotations override the behavior defined on the class. Methods that do not meet the following requirements are ignored. A template extension method: +* must not be `private` * must be static, -* must not return `void`, -* must accept at least one parameter. - -The class of the first parameter is always used to match the base object. +* must not return `void`. + +If there is no namespace defined the class of the first parameter that is not annotated with `@TemplateAttribute` is used to match the base object. Otherwise the namespace is used to match an expression. + The method name is used to match the property name by default. However, it is possible to specify the matching name with `TemplateExtension#matchName()`. - -NOTE: A special constant - `ANY` - may be used to specify that the extension method matches any name. In that case, the method must declare at least two parameters and the second parameter must be a string. +A special constant - `TemplateExtension#ANY` - may be used to specify that the extension method matches any name. +It is also possible to match the name against a regular expression specified in `TemplateExtension#matchRegex()`. +In both cases, an additional string method parameter must be used to pass the property name. +If both `matchName()` and `matchRegex()` are set the regular expression is used for matching.
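+To make the `matchRegex()` rule concrete, a hypothetical extension class could look like the following. The class, package and property names are illustrative and not part of the guide's example code.
+
+[source,java]
+----
+package org.acme;
+
+import io.quarkus.qute.TemplateExtension;
+
+public class NumberExtensions {
+
+    // Matches expressions such as {counter.plus5} or {counter.plus10} on Integer values;
+    // the matched name ("plus5", "plus10", ...) is passed in the extra String parameter.
+    @TemplateExtension(matchRegex = "plus\\d+")
+    static int plus(Integer base, String name) {
+        return base + Integer.parseInt(name.substring("plus".length()));
+    }
+}
+----
+
+In a template, `{counter.plus10}` would then resolve to the value of `counter` increased by 10.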
.Extension Method Example [source,java] @@ -858,15 +1287,17 @@ This template extension method makes it possible to render the following templat ==== Method Parameters -An extension method may accept multiple parameters. -The first parameter is always used to pass the base object, ie. `org.acme.Item` in the previous example. -Other parameters are resolved when rendering the template and passed to the extension method. +An extension method may declare parameters. +If no namespace is specified then the first parameter that is not annotated with `@TemplateAttribute` is used to pass the base object, i.e. `org.acme.Item` in the first example. +If matching any name or using a regular expression then a string method parameter needs to be used to pass the property name. +Parameters annotated with `@TemplateAttribute` are obtained via `TemplateInstance#getAttribute()`. +All other parameters are resolved when rendering the template and passed to the extension method. .Multiple Parameters Example [source,java] ---- @TemplateExtension -class MyExtensions { +class BigDecimalExtensions { static BigDecimal scale(BigDecimal val, int scale, RoundingMode mode) { <1> return val.setScale(scale, mode); @@ -881,11 +1312,44 @@ class MyExtensions { ---- <1> `item.discountedPrice` is resolved to an instance of `BigDecimal`. -==== Built-in Template Extension methods +[[namespace_extension_methods]] +==== Namespace Extension Methods + +If `TemplateExtension#namespace()` is specified then the extension method is used to resolve expressions with the given <>. +Template extension methods that share the same namespace are grouped in one resolver ordered by `TemplateExtension#priority()`. +The first matching extension method is used to resolve an expression. + +.Namespace Extension Method Example +[source,java] +---- +@TemplateExtension(namespace = "str") +public static class StringExtensions { + + static String format(String fmt, Object... args) { + return String.format(fmt, args); + } + + static String reverse(String val) { + return new StringBuilder(val).reverse().toString(); + } +} +---- + +These extension methods can be used as follows. + +[source,html] +---- +{str:format('%s %s!','Hello', 'world')} <1> +{str:reverse('hello')} <2> +---- +<1> The output is `Hello world!` +<2> The output is `olleh` + +==== Built-in Template Extension Quarkus provides a set of built-in extension methods. -===== Map extension methods: +===== Maps * `keys` or `keySet`: Returns a Set view of the keys contained in a map ** `{#for key in map.keySet}` @@ -900,18 +1364,50 @@ Quarkus provides a set of built-in extension methods. ** `{#if map.isEmpty}` * `get(key)`: Returns the value to which the specified key is mapped -** `{map.get('foo')}` or `{map['foo']}` +** `{map.get('foo')}` + +TIP: A map value can be also accessed directly: `{map.myKey}`. Use the bracket notation for keys that are not legal identifiers: `{map['my key']}`. -===== Collection extension methods: +===== Collections * `get(index)`: Returns the element at the specified position in a list -** `{list.get(0)}` or `{list[0]}` +** `{list.get(0)}` -===== Number extension methods: +TIP: A list element can be accessed directly: `{list.10}` or `{list[10]}`. 
+ +===== Numbers + * `mod`: Modulo operation ** `{#if counter.mod(5) == 0}` +===== Config + +* `config:` or `config:[]`: Returns the config value for the given property name +** `{config:foo}` or `{config:['property.with.dot.in.name']}` + +* `config:property(name)`: Returns the config value for the given property name; the name can be obtained dynamically by an expression +** `{config:property('quarkus.foo')}` or `{config:property(foo.getPropertyName())}` + +===== Time + +* `format(pattern)`: Formats temporal objects from the `java.time` package +** `{dateTime.format('d MMM uuuu')}` + +* `format(pattern,locale)`: Formats temporal objects from the `java.time` package +** `{dateTime.format('d MMM uuuu',myLocale)}` + +* `format(pattern,locale,timeZone)`: Formats temporal objects from the `java.time` package +** `{dateTime.format('d MMM uuuu',myLocale,myTimeZoneId)}` + +* `time:format(dateTime,pattern)`: Formats temporal objects from the `java.time` package, `java.util.Date`, `java.util.Calendar` and `java.lang.Number` +** `{time:format(myDate,'d MMM uuuu')}` + +* `time:format(dateTime,pattern,locale)`: Formats temporal objects from the `java.time` package, `java.util.Date`, `java.util.Calendar` and `java.lang.Number` +** `{time:format(myDate,'d MMM uuuu', myLocale)}` + +* `time:format(dateTime,pattern,locale,timeZone)`: Formats temporal objects from the `java.time` package, `java.util.Date`, `java.util.Calendar` and `java.lang.Number` +** `{time:format(myDate,'d MMM uuuu',myLocale,myTimeZoneId)}` + [[template_data]] === @TemplateData @@ -977,8 +1473,8 @@ class Item { [[resteasy_integration]] === RESTEasy Integration -If you want to use Qute in your JAX-RS application, you'll need to add the `quarkus-resteasy-qute` extension first. -In your `pom.xml` file, add: +If you want to use Qute in your JAX-RS application, then depending on which JAX-RS stack you are using, you'll need to register the proper extension first. +If you are using the traditional `quarkus-resteasy` extension, then in your `pom.xml` file, add: [source,xml] ----
    <dependency> <groupId>io.quarkus</groupId> <artifactId>quarkus-resteasy-qute</artifactId> </dependency> ---- -This extension registers a special `ContainerResponseFilter` implementation so that a resource method can return a `TemplateInstance` and the filter takes care of all necessary steps. -A simple JAX-RS resource may look like this: +If instead you are using RESTEasy Reactive via the `quarkus-resteasy-reactive` extension, then in your `pom.xml` file, add: + +[source,xml] +---- +<dependency> + <groupId>io.quarkus</groupId> + <artifactId>quarkus-resteasy-reactive-qute</artifactId> +</dependency> +---- + +Both of these extensions register a special `ContainerResponseFilter` implementation which enables resource methods to return a `TemplateInstance`, thus freeing users from having to take care of all necessary internal steps. + +The end result is that using Qute within a JAX-RS resource may look as simple as: .HelloResource.java [source,java] ---- @@ -1017,10 +1524,11 @@ public class HelloResource { } } ---- -<1> If there is no `@ResourcePath` qualifier provided, the field name is used to locate the template. In this particular case, we're injecting a template with path `templates/hello.txt`. +<1> If there is no `@Location` qualifier provided, the field name is used to locate the template. In this particular case, we're injecting a template with path `templates/hello.txt`. <2> `Template.data()` returns a new template instance that can be customized before the actual rendering is triggered. In this case, we put the name value under the key `name`. The data map is accessible during rendering. <3> Note that we don't trigger the rendering - this is done automatically by a special `ContainerResponseFilter` implementation. +TIP: Users are encouraged to use <> that help to organize the templates for a specific JAX-RS resource and enable <> automatically. The content negotiation is performed automatically. The resulting output depends on the `Accept` header received from the client. @@ -1043,6 +1551,24 @@ class DetailResource { <1> Inject a variant template with base path derived from the injected field - `src/main/resources/templates/item`. <2> For `text/plain` the `src/main/resources/templates/item.txt` template is used. For `text/html` the `META-INF/resources/templates/item.html` template is used. +The `RestTemplate` util class can be used to obtain a template instance from the body of a JAX-RS resource method: + +.RestTemplate Example +[source,java] +---- +@Path("/detail") +class DetailResource { + + @GET + @Produces({ MediaType.TEXT_HTML, MediaType.TEXT_PLAIN }) + public TemplateInstance item() { + return RestTemplate.data("myItem", new Item("Alpha", 1000)); <1> + } +} +---- +<1> The name of the template is derived from the resource class and method name; `DetailResource/item` in this particular case. + +WARNING: Unlike with `@Inject` the templates obtained via `RestTemplate` are not validated, i.e. the build does not fail if a template does not exist. === Development Mode @@ -1059,7 +1585,17 @@ Subsequently, the bundles can be used at runtime: 1. Directly in your code via `io.quarkus.qute.i18n.MessageBundles#get()`; e.g. `MessageBundles.get(AppMessages.class).hello_name("Lucie")` 2. Injected in your beans via `@Inject`; e.g. `@Inject AppMessages` -3. Referenced in the templates via the message bundle name; e.g. `{msg:hello_name('Lucie')}` +3. Referenced in the templates via the message bundle namespace: ++ +[source,html] +---- + {msg:hello_name('Lucie')} <1> <2> <3> + {msg:message(myKey,'Lu')} <4> +---- +<1> `msg` is the default namespace. +<2> `hello_name` is the message key. +<3> `Lucie` is the parameter of the message bundle interface method.
+<4> It is also possible to obtain a localized message for a key resolved at runtime using a reserved key `message`. The validation is skipped in this case though. .Message Bundle Interface Example [source,java] @@ -1103,7 +1639,7 @@ NOTE: A warning message is logged for each _unused_ parameter. The default locale of the Java Virtual Machine used to *build the application* is used for the `@MessageBundle` interface by default. However, the `io.quarkus.qute.i18n.MessageBundle#locale()` can be used to specify a custom locale. -Additionaly, there are two ways to define a localized bundle: +Additionally, there are two ways to define a localized bundle: 1. Create an interface that extends the default interface that is annotated with `@Localized` 2. Create an UTF-8 encoded file located in `src/main/resources/messages`; e.g. `msg_de.properties`. @@ -1117,7 +1653,7 @@ import io.quarkus.qute.i18n.Localized; import io.quarkus.qute.i18n.Message; @Localized("de") <1> -public interface GermanAppMessages { +public interface GermanAppMessages extends AppMessages { @Override @Message("Hallo {name}!") <2> @@ -1143,6 +1679,47 @@ hello_name=Hallo {name}! <1> <2> <1> Each line in a localized file represents a message template. <2> Keys and values are separated by the equals sign. +Once we have the localized bundles defined we need a way to _select_ a correct bundle. +If you use a message bundle expression in a template you'll have to specify the `locale` attribute of a template instance. + +.`locale` Attribute Example +[source,java] +---- +@Singleton +public class MyBean { + + @Inject + Template hello; + + String render() { + return hello.instance().setAttribute("locale", Locale.forLanguageTag("cs")).render(); <1> + } +} +---- +<1> You can set a `Locale` instance or a locale tag string (IETF). + + +NOTE: When using <> the `locale` attribute is derived from the the `Accept-Language` header if not set by a user. + +The `@Localized` qualifier can be used to inject a localized message bundle interface. + +.Injected Localized Message Bundle Example +[source,java] +---- +@Singleton +public class MyBean { + + @Localized("cs") <1> + AppMessages msg; + + String render() { + return msg.hello_name("Jachym"); + } +} +---- +<1> The annotation value is a locale tag string (IETF). + + === Configuration Reference -include::{generated-dir}/config/quarkus-qute.adoc[leveloffset=+1, opts=optional] \ No newline at end of file +include::{generated-dir}/config/quarkus-qute.adoc[leveloffset=+1, opts=optional] diff --git a/docs/src/main/asciidoc/qute.adoc b/docs/src/main/asciidoc/qute.adoc index 3061e6003d63f..a53ab455466ae 100644 --- a/docs/src/main/asciidoc/qute.adoc +++ b/docs/src/main/asciidoc/qute.adoc @@ -1,10 +1,9 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Qute Templating Engine -:extension-status: experimental include::./attributes.adoc[] @@ -15,11 +14,9 @@ In the development mode, all files located in `src/main/resources/templates` are Furthermore, we try to detect most of the template problems at build time. In this guide, you will learn how to easily render templates in your application. -include::./status-include.adoc[] - == Hello World with JAX-RS -If you want to use Qute in your JAX-RS application, you need to add the `quarkus-qute-resteasy` extension first. 
+If you want to use Qute in your JAX-RS application, you need to add the `quarkus-resteasy-qute` extension first. In your `pom.xml` file, add: [source,xml] @@ -75,7 +72,7 @@ public class HelloResource { If your application is running, you can request the endpoint: -[source, shell] +[source,shell] ---- $ curl -w "\n" http://localhost:8080/hello?name=Martin Hello Martin! @@ -116,7 +113,7 @@ import javax.ws.rs.Path; import javax.ws.rs.QueryParam; import io.quarkus.qute.TemplateInstance; -import io.quarkus.qute.api.CheckedTemplate; +import io.quarkus.qute.CheckedTemplate; @Path("hello") public class HelloResource { @@ -137,8 +134,7 @@ public class HelloResource { <2> `Templates.hello()` returns a new template instance that can be customized before the actual rendering is triggered. In this case, we put the name value under the key `name`. The data map is accessible during rendering. <3> Note that we don't trigger the rendering - this is done automatically by a special `ContainerResponseFilter` implementation. -NOTE: Once you have declared a `@CheckedTemplate` class, we will check that all its methods point to existing templates, so if you try to use a template -from your Java code and you forgot to add it, we will let you know at build time :) +NOTE: Once you have declared a `@CheckedTemplate` class, we will check that all its methods point to existing templates, so if you try to use a template from your Java code and you forgot to add it, we will let you know at build time :) Keep in mind this style of declaration allows you to reference templates declared in other resources too: @@ -165,9 +161,9 @@ public class GoodbyeResource { } ---- -=== Toplevel type-safe templates +=== Top-level type-safe templates -Naturally, if you want to declare templates at the toplevel, directly in `/src/main/resources/templates/hello.txt`, for example, +Naturally, if you want to declare templates at the top-level, directly in `/src/main/resources/templates/hello.txt`, for example, you can declare them in a toplevel (non-nested) `Templates` class: .HelloResource.java @@ -177,7 +173,7 @@ package org.acme.quarkus.sample; import io.quarkus.qute.TemplateInstance; import io.quarkus.qute.Template; -import io.quarkus.qute.api.CheckedTemplate; +import io.quarkus.qute.CheckedTemplate; @CheckedTemplate public class Templates { @@ -240,7 +236,7 @@ import javax.ws.rs.core.MediaType; import io.quarkus.qute.TemplateInstance; import io.quarkus.qute.Template; -import io.quarkus.qute.api.CheckedTemplate; +import io.quarkus.qute.CheckedTemplate; @Path("item") public class ItemResource { diff --git a/docs/src/main/asciidoc/reactive-event-bus.adoc b/docs/src/main/asciidoc/reactive-event-bus.adoc index d21b8e72fbcfc..183d79755e001 100644 --- a/docs/src/main/asciidoc/reactive-event-bus.adoc +++ b/docs/src/main/asciidoc/reactive-event-bus.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using the event bus @@ -25,19 +25,20 @@ However, it is limited to single-event behavior (no stream) and to local message This mechanism uses the Vert.x EventBus, so you need to enable the `vertx` extension to use this feature. 
If you are creating a new project, set the `extensions` parameter as follows: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=vertx-quickstart \ - -Dextensions="vertx,resteasy-mutiny" + -Dextensions="vertx,resteasy-mutiny" \ + -DnoExamples cd vertx-quickstart ---- If you have an already created project, the `vertx` extension can be added to an existing Quarkus project with the `add-extension` command: -[source] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="vertx,resteasy-mutiny" ---- @@ -88,6 +89,18 @@ void consumeBlocking(String message) { // Something blocking } ---- + +Alternatively, you can annotate your method with `@io.smallrye.common.annotation.Blocking`: +[source, java] +---- +@ConsumeEvent(value = "blocking-consumer") +@Blocking +void consumeBlocking(String message) { + // Something blocking +} +---- + +When `@Blocking` is used, the value of the `blocking` attribute of `@ConsumeEvent` is ignored. ==== Asynchronous processing is also possible by returning either an `io.smallrye.mutiny.Uni` or a `java.util.concurrent.CompletionStage`: @@ -262,12 +275,13 @@ This message is consumed by another bean and the response is sent using the _rep First create a new project using: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=vertx-http-quickstart \ - -Dextensions="vertx" + -Dextensions="vertx" \ + -DnoExamples cd vertx-http-quickstart ---- @@ -351,14 +365,14 @@ To better understand, let's detail how the HTTP request/response has been handle This application can be packaged using: -[source,shell] +[source,bash] ---- ./mvnw clean package ---- You can also compile it as a native executable with: -[source, shell] +[source,bash] ---- ./mvnw clean package -Pnative ---- diff --git a/docs/src/main/asciidoc/reactive-messaging-http.adoc b/docs/src/main/asciidoc/reactive-messaging-http.adoc new file mode 100644 index 0000000000000..2e653d4ee86f8 --- /dev/null +++ b/docs/src/main/asciidoc/reactive-messaging-http.adoc @@ -0,0 +1,395 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Using HTTP and WebSockets with Reactive Messaging + +include::./attributes.adoc[] + +This guide demonstrates how your Quarkus application can utilize MicroProfile Reactive Messaging to +consume and produce HTTP messages. + +== Prerequisites + +To complete this guide, you need: + +* less than 15 minutes +* an IDE +* JDK 1.8+ installed with `JAVA_HOME` configured appropriately +* Apache Maven {maven-version} +* GraalVM, Docker or Podman installed if you want to run in native mode. + +== Architecture + +In this guide we will implement a service, namely `CostConverter`, that consumes HTTP messages +with costs in multiple currencies and converts each cost to its value in Euro. + +To let a user easily try out the service, we will implement an HTTP resource summing up the costs +(`CostCollector`), and a simple web page to add new costs and watch the sum. + + +== Solution + +We recommend that you follow the instructions in the next sections and create the application step by step. +However, you can go right to the completed example.
+ +Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. + +The solution is located in the `reactive-messaging-http-quickstart` {quickstarts-tree-url}/reactive-messaging-http-quickstart[directory]. + +== Creating the Maven Project + +First, we need a new project. Create a new project with the following command: + +[source,bash,subs=attributes+] +---- +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ + -DprojectGroupId=org.acme \ + -DprojectArtifactId=reactive-messaging-http-quickstart \ + -Dextensions="reactive-messaging-http" \ + -DnoExamples +cd reactive-messaging-http-quickstart +---- + +This command generates a Maven project, importing the Reactive Messaging and HTTP connector extensions. + +== The Converter + +Create the `src/main/java/org/acme/reactivehttp/CostConverter.java` file, with the following content: + +[source, java] +---- +package org.acme.reactivehttp; + +import org.eclipse.microprofile.reactive.messaging.Incoming; +import org.eclipse.microprofile.reactive.messaging.Outgoing; +import org.jboss.logging.Logger; + +import javax.enterprise.context.ApplicationScoped; +import java.util.HashMap; +import java.util.Map; + +/** + * A bean consuming costs in multiple currencies and producing prices in EUR from them + */ +@ApplicationScoped +public class CostConverter { + + private static final Logger log = Logger.getLogger(CostConverter.class); + + private static final Map<String, Double> conversionRatios = new HashMap<>(); + + static { + conversionRatios.put("CHF", 0.93); + conversionRatios.put("USD", 0.84); + conversionRatios.put("PLN", 0.22); + conversionRatios.put("EUR", 1.); + } + + @Incoming("incoming-costs") // <1> + @Outgoing("outgoing-costs") // <2> + double convert(Cost cost) { // <3> + Double conversionRatio = conversionRatios.get(cost.getCurrency().toUpperCase()); + if (conversionRatio == null) { + return 0.; + } + return conversionRatio * cost.getValue(); + } +} + + +---- +<1> Consume messages from the `incoming-costs` stream. +<2> Dispatch returned values to the `outgoing-costs` stream. +<3> Consume an event with payload of type `Cost` and produce a `double`. +In the case of consuming an arbitrary object, the reactive-messaging-http extension will attempt +to deserialize the request body assuming it is JSON. + +Let's define the `Cost` class: +[source, java] +---- +package org.acme.reactivehttp; + +public class Cost { + private double value; + private String currency; + + public double getValue() { + return value; + } + + public void setValue(double value) { + this.value = value; + } + + public String getCurrency() { + return currency; + } + + public void setCurrency(String currency) { + this.currency = currency; + } +} + +---- + + + +In the next step, we will create configurations for both streams in the `application.properties` file. + +== Configuring the HTTP connector + +We need to configure the HTTP connector. This is done in the `application.properties` file. +The keys are structured as follows: + +`mp.messaging.[outgoing|incoming].{channel-name}.{property}=value` + +The `channel-name` segment must match the value set in the `@Incoming` and `@Outgoing` annotation: + +- `incoming-costs` -> a source that receives costs +- `outgoing-costs` -> a sink that receives converted costs + +[source,properties] +---- +mp.messaging.outgoing.outgoing-costs.connector=quarkus-http + +# here we are using a URL pointing to a test endpoint +# you can use e.g.
an environment variable to change it +mp.messaging.outgoing.outgoing-costs.url=http://localhost:${quarkus.http.port}/cost-collector + +# POST is the default method. Another possibility is PUT +mp.messaging.outgoing.outgoing-costs.method=POST + + +mp.messaging.incoming.incoming-costs.connector=quarkus-http + +# the incoming-costs channel will be fed via an endpoint on the `/costs` path +mp.messaging.incoming.incoming-costs.path=/costs + +# POST is the default method. Another possibility is PUT +mp.messaging.incoming.incoming-costs.method=POST +---- + + +== The CostCollector +To illustrate that converting messages and passing them through works, let's add an endpoint that will +receive the outgoing costs and sum them up. +This is a regular JAX-RS endpoint. + +[source, java] +---- +package org.acme.reactivehttp; + +import javax.enterprise.context.ApplicationScoped; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; + +@Path("/cost-collector") +@ApplicationScoped +public class CostCollector { + + private double sum = 0; + + @POST + public void consumeCost(String valueAsString) { + sum += Double.parseDouble(valueAsString); + } + + @GET + public double getSum() { + return sum; + } + +} + +---- + +== The HTML page + +To conveniently interact with the application, let's create a simple web page. + +The page will provide a form to add costs, and information about the current sum of costs. +The page periodically updates the sum by requesting the current sum from `/cost-collector`. + +Create the `src/main/resources/META-INF/resources/index.html` file, with the following content: + +[source, html] +---- +<!-- HTML head with the page title "Costs" -->
+<!-- Page body: an "Add a cost" form, a "Last cost" display, and the line "The total cost is N/A €." refreshed with the sum fetched from /cost-collector -->
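+<!-- A minimal illustrative sketch of such a page body; the element ids, exact markup and
+     the script below are assumptions, not taken from the quickstart: -->
+<h2>Add a cost</h2>
+<form onsubmit="addCost(); return false;">
+    Value: <input type="text" id="value">
+    Currency: <input type="text" id="currency" value="CHF">
+    <button type="submit">Submit</button>
+</form>
+
+<h2>Last cost</h2>
+<div id="last-cost">N/A</div>
+
+<div>The total cost is <span id="sum">N/A</span> €.</div>
+
+<script>
+    // POST the entered cost to the /costs endpoint as JSON; the field names match the Cost class.
+    function addCost() {
+        var cost = {
+            value: parseFloat(document.getElementById('value').value),
+            currency: document.getElementById('currency').value
+        };
+        fetch('/costs', {
+            method: 'POST',
+            headers: {'Content-Type': 'application/json'},
+            body: JSON.stringify(cost)
+        }).then(function () {
+            document.getElementById('last-cost').textContent = cost.value + ' ' + cost.currency;
+        });
+    }
+    // Periodically refresh the total from the CostCollector endpoint.
+    setInterval(function () {
+        fetch('/cost-collector')
+            .then(function (response) { return response.text(); })
+            .then(function (sum) { document.getElementById('sum').textContent = sum; });
+    }, 1000);
+</script>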
+---- + +== Get it running + +Run the application using: + +[source,bash] +---- +./mvnw quarkus:dev +---- + +Open `http://localhost:8080/index.html` in your browser. + + +== Running Native + +You can build the native executable with: + +[source,bash] +---- +./mvnw package -Pnative +---- + +== Going further + +=== HTTP connector options +All `quarkus-http` connector options: + +[source, properties] +---- +# OUTGOING + +# The target URL +mp.messaging.outgoing..url=http://localhost:8213 + +# Message payload serializer, optional, implementation of `io.quarkus.reactivemessaging.http.runtime.serializers.Serializer` +mp.messaging.outgoing..serializer=com.example.MySerializer + +# The number of attempts to make for sending a request to a remote endpoint. Must not be less than zero +# Zero by default +mp.messaging.outgoing..maxRetries=3 + +# Configures the random factor when using back-off with maxRetries > 0. 0.5 by default +mp.messaging.outgoing..jitter=0.3 + +# Configures a back-off delay between attempts to send a request. +# A random factor (jitter) is applied to increase the delay when several failures happen. +mp.messaging.outgoing..delay=1s + +# The HTTP method (either `POST` or `PUT`), `POST` by default +mp.messaging.outgoing..method=PUT + +# INCOMING +# The HTTP method (either `POST` or `PUT`), `POST` by default +mp.messaging.incoming..method=POST + +# The path of the endpoint +mp.messaging.incoming..path=/my-reactive-ws-endpoint + +# HTTP endpoint buffers messages if a consumer is not able to keep up. This setting specifies the size of the buffer. +# 8 by default. +mp.messaging.incoming..buffer-size=13 + +---- + +=== WebSockets +Apart from the `quarkus-http` connector, the `quarkus-reactive-messaging-http` extension also brings in +`quarkus-websocket` - a connector for exposing and feeding WebSockets. +At the moment only binary data is supported. + +NOTE: While the sink of the HTTP connector checks if the message is consumed by the remote endpoint, +the WebSocket sink does not. It may happen that a failure to receive a message is not reported, +e.g. if the remote side closes the WebSocket connection in a crucial moment. + +The `quarkus-websocket` connector is configured with the following properties: + +[source,properties] +---- +# OUTGOING + +# The target URL +mp.messaging.outgoing..url=ws://localhost:8234/ + +# Message serializer, optional, implementation of `io.quarkus.reactivemessaging.http.runtime.serializers.Serializer` +mp.messaging.outgoing..serializer=com.example.MySerializer + +# The number of retries to make for sending a message to a remote websocket endpoint. +# A value greater than 0 is advised. Otherwise, a web socket timeout can result in a dropped message +# The default value is 1 +mp.messaging.outgoing..maxRetries=1 + +# Configures the random factor when using back-off with maxAttempts > 1, 0.5 by default +mp.messaging.outgoing..jitter=0.7 + +# Configures a back-off delay between attempts to send a request. +# A random factor (jitter) is applied to increase the delay when several failures happen. +mp.messaging.outgoing..delay=2s + + +# INCOMING + +# The path of the endpoint +mp.messaging.incoming..path=/my-ws-endpoint + +# Web socket endpoint buffers messages if a consumer is not able to keep up. +# This setting specifies the size of the buffer. 8 by default +mp.messaging.incoming..buffer-size=3 +---- + +=== Reactive Messaging +This extension utilizes MicroProfile Reactive Messaging to build data streaming applications.
+ +If you did the Kafka or AMQP quickstart, you have realized that it's the same code. +The only difference is the connector configuration. + +If you want to go further check the documentation of https://smallrye.io/smallrye-reactive-messaging[SmallRye Reactive Messaging], the implementation used in Quarkus. diff --git a/docs/src/main/asciidoc/reactive-routes.adoc b/docs/src/main/asciidoc/reactive-routes.adoc index 41c9ed639c291..8ea768c54419e 100644 --- a/docs/src/main/asciidoc/reactive-routes.adoc +++ b/docs/src/main/asciidoc/reactive-routes.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Using Reactive Routes @@ -101,6 +101,20 @@ public void blocking(RoutingContext rc) { } ---- +[NOTE] +==== +Alternatively, you can use `@io.smallrye.common.annotation.Blocking` and omit the `type = Route.HandlerType.BLOCKING`: +[source, java] +---- +@Route(methods = HttpMethod.POST, path = "/post") +@Blocking +public void blocking(RoutingContext rc) { + // ... +} +---- +When `@Blocking` is used, it ignores the `type` attribute of `@Route`. +==== + The `@Route` annotation is repeatable and so you can declare several routes for a single method: [source,java] @@ -186,15 +200,18 @@ A route method must be a non-private non-static method of a CDI bean. If the annotated method returns `void` then it has to accept at least one argument - see the supported types below. If the annotated method does not return `void` then the arguments are optional. +NOTE: Methods that return `void` must __end__ the response or the HTTP request to this route will never end. +Some methods of `RoutingExchange` do it for you, others not and you must call the `end()` method of the response by yourself, please refer to its JavaDoc for more information. + A route method can accept arguments of the following types: * `io.vertx.ext.web.RoutingContext` -* `io.vertx.reactivex.ext.web.RoutingContext` +* `io.vertx.mutiny.ext.web.RoutingContext` * `io.quarkus.vertx.web.RoutingExchange` * `io.vertx.core.http.HttpServerRequest` * `io.vertx.core.http.HttpServerResponse` -* `io.vertx.reactivex.core.http.HttpServerRequest` -* `io.vertx.reactivex.core.http.HttpServerResponse` +* `io.vertx.mutiny.core.http.HttpServerRequest` +* `io.vertx.mutiny.core.http.HttpServerResponse` Furthermore, it is possible to inject the `HttpServerRequest` parameters into a method parameter annotated with `@io.quarkus.vertx.web.Param`: @@ -259,6 +276,18 @@ Person createPerson(@Body Person person, @Param("id") Optional primaryKe } ---- +A failure handler can declare a single method parameter whose type extends `Throwable`. +The type of the parameter is used to match the result of `RoutingContext#failure()`. + +.Failure Handler Example +[source,java] +---- +@Route(type = HandlerType.FAILURE) +void unsupported(UnsupportedOperationException e, HttpServerResponse response) { + response.setStatusCode(501).end(e.getMessage()); +} +---- + === Returning Unis In a reactive route, you can return a `Uni` directly: @@ -310,7 +339,7 @@ String helloSync(RoutingContext context) { ---- Be aware, the processing must be **non-blocking** as reactive routes are invoked on the IO Thread. -Otherwise, use the `blocking` attribute of the `@Route` annotation. 
+Otherwise, set the `type` attribute of the `@Route` annotation to `Route.HandlerType.BLOCKING`, or use the `@io.smallrye.common.annotation.Blocking` annotation. The method can return: @@ -475,6 +504,40 @@ id: 3 ---- +=== Using Bean Validation + +You can combine reactive routes and Bean Validation. +First, don't forget to add the `quarkus-hibernate-validator` extension to your project. +Then, you can add constraints to your route parameter (annotated with `@Param` or `@Body`): + +[source,java] +---- +@Route(produces = "application/json") +Person createPerson(@Body @Valid Person person, @NonNull @Param("id") String primaryKey) { + // ... +} +---- + +If the parameters do not pass the tests, it returns an HTTP 400 response. +If the request accepts JSON payload, the response follows the https://opensource.zalando.com/problem/constraint-violation/[Problem] format. + +When returning an object or a `Uni`, you can also use the `@Valid` annotation: + +[source,java] +---- +@Route(...) +@Valid Uni createPerson(@Body @Valid Person person, @NonNull @Param("id") String primaryKey) { + // ... +} +---- + +If the item produced by the route does not pass the validation, it returns a HTTP 500 response. +If the request accepts JSON payload, the response follows the https://opensource.zalando.com/problem/constraint-violation/[Problem] format. + +Note that only `@Valid` is supported on the return type. +The returned class can use any constraint. +In the case of `Uni`, it checks the item produced asynchronously. + == Using the Vert.x Web Router You can also register your route directly on the _HTTP routing layer_ by registering routes directly on the `Router` object. @@ -547,7 +610,7 @@ This is enough to generate a basic OpenAPI schema document from your Vert.x Rout [source,bash] ---- -curl http://localhost:8080/openapi +curl http://localhost:8080/q/openapi ---- You will see the generated OpenAPI schema document: @@ -600,7 +663,7 @@ example, adding header info, or specifying the return type on `void` methods mig email = "techsupport@example.com"), license = @License( name = "Apache 2.0", - url = "http://www.apache.org/licenses/LICENSE-2.0.html")) + url = "https://www.apache.org/licenses/LICENSE-2.0.html")) ) @ApplicationScoped public class MyDeclarativeRoutes { @@ -646,7 +709,7 @@ info: email: techsupport@example.com license: name: Apache 2.0 - url: http://www.apache.org/licenses/LICENSE-2.0.html + url: https://www.apache.org/licenses/LICENSE-2.0.html version: 1.0.1 paths: /greetings: @@ -683,7 +746,7 @@ paths: Swagger UI is included by default when running in `dev` or `test` mode, and can optionally added to `prod` mode. See <> Guide for more details. 
-Navigate to link:http://localhost:8080/swagger-ui/[localhost:8080/swagger-ui/] and you will see the Swagger UI screen: +Navigate to link:http://localhost:8080/q/swagger-ui/[localhost:8080/q/swagger-ui/] and you will see the Swagger UI screen: image:reactive-routes-guide-screenshot01.png[alt=Swagger UI] diff --git a/docs/src/main/asciidoc/reactive-sql-clients.adoc b/docs/src/main/asciidoc/reactive-sql-clients.adoc index 8e7dd391897e3..bf0ad6b551166 100644 --- a/docs/src/main/asciidoc/reactive-sql-clients.adoc +++ b/docs/src/main/asciidoc/reactive-sql-clients.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Reactive SQL Clients @@ -49,7 +49,7 @@ public class Fruit { ==== Do you need a ready-to-use PostgreSQL server to try out the examples? -[source,shell] +[source,bash] ---- docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name quarkus_test -e POSTGRES_USER=quarkus_test -e POSTGRES_PASSWORD=quarkus_test -e POSTGRES_DB=quarkus_test -p 5432:5432 postgres:10.5 ---- @@ -62,18 +62,20 @@ docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name qua First, make sure your project has the `quarkus-reactive-pg-client` extension enabled. If you are creating a new project, set the `extensions` parameter as follows: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=reactive-pg-client-quickstart \ - -Dextensions="reactive-pg-client" + -DclassName="org.acme.vertx.FruitResource" \ + -Dpath="/fruits" \ + -Dextensions="resteasy,reactive-pg-client,resteasy-mutiny" cd reactive-pg-client-quickstart ---- If you have an already created project, the `reactive-pg-client` extension can be added to an existing Quarkus project with the `add-extension` command: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="reactive-pg-client" ---- @@ -88,8 +90,16 @@ Otherwise, you can manually add this to the dependencies section of your `pom.xm ---- +=== Mutiny + +Reactive REST endpoints in your application that return Uni or Multi need `Mutiny support for RESTEasy` extension (`io.quarkus:quarkus-resteasy-mutiny`) to work properly: + +[source,bash] +---- +./mvnw quarkus:add-extension -Dextensions="resteasy-mutiny" +---- + [TIP] -.Mutiny ==== In this guide, we will use the Mutiny API of the Reactive PostgreSQL Client. If you're not familiar with Mutiny reactive types, read the link:getting-started-reactive#mutiny[Getting Started with Reactive guide] first. @@ -98,11 +108,11 @@ If you're not familiar with Mutiny reactive types, read the link:getting-started === JSON Binding We will expose `Fruit` instances over HTTP in the JSON format. 
-Consequently, you also need to add the `quarkus-resteasy-jsonb` extension: +Consequently, you also need to add the `quarkus-resteasy-jackson` extension: -[source,shell] +[source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="resteasy-jsonb" +./mvnw quarkus:add-extension -Dextensions="resteasy-jackson" ---- If you prefer not to use the command line, manually add this to the dependencies section of your `pom.xml` file: @@ -111,7 +121,7 @@ If you prefer not to use the command line, manually add this to the dependencies ---- io.quarkus - quarkus-resteasy-jsonb + quarkus-resteasy-jackson ---- @@ -121,7 +131,7 @@ Of course, this is only a requirement for this guide, not any application using The Reactive PostgreSQL Client can be configured with standard Quarkus datasource properties and a reactive URL: -[source] +[source,properties] .src/main/resources/application.properties ---- quarkus.datasource.db-kind=postgresql @@ -136,8 +146,6 @@ With that you may create your `FruitResource` skeleton and `@Inject` a `io.vertx .src/main/java/org/acme/vertx/FruitResource.java ---- @Path("fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { @Inject @@ -261,14 +269,14 @@ public Multi get() { Now start Quarkus in `dev` mode with: -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- Lastly, open your browser and navigate to http://localhost:8080/fruits, you should see: -[source, shell] +[source,json] ---- [{"id":3,"name":"Apple"},{"id":1,"name":"Orange"},{"id":2,"name":"Pear"}] ---- @@ -322,7 +330,7 @@ The same logic applies when saving a `Fruit`: .src/main/java/org/acme/vertx/Fruit.java ---- public Uni save(PgPool client) { - return client.preparedQuery("INSERT INTO fruits (name) VALUES ($1) RETURNING (id)").execute(Tuple.of(name)) + return client.preparedQuery("INSERT INTO fruits (name) VALUES ($1) RETURNING id").execute(Tuple.of(name)) .onItem().transform(pgRowSet -> pgRowSet.iterator().next().getLong("id")); } ---- @@ -494,9 +502,9 @@ The following snippet shows how to run 2 insertions in the same transaction: ---- public static Uni insertTwoFruits(PgPool client, Fruit fruit1, Fruit fruit2) { return SqlClientHelper.inTransactionUni(client, tx -> { - Uni> insertOne = tx.preparedQuery("INSERT INTO fruits (name) VALUES ($1) RETURNING (id)") + Uni> insertOne = tx.preparedQuery("INSERT INTO fruits (name) VALUES ($1) RETURNING id") .execute(Tuple.of(fruit1.name)); - Uni> insertTwo = tx.preparedQuery("INSERT INTO fruits (name) VALUES ($1) RETURNING (id)") + Uni> insertTwo = tx.preparedQuery("INSERT INTO fruits (name) VALUES ($1) RETURNING id") .execute(Tuple.of(fruit2.name)); return insertOne.and(insertTwo) @@ -514,7 +522,7 @@ You can also create dependent actions as follows: ---- return SqlClientHelper.inTransactionUni(client, tx -> tx - .preparedQuery("INSERT INTO person (firstname,lastname) VALUES ($1,$2) RETURNING (id)") + .preparedQuery("INSERT INTO person (firstname,lastname) VALUES ($1,$2) RETURNING id") .execute(Tuple.of(person.getFirstName(), person.getLastName())) .onItem().transformToUni(id -> tx.preparedQuery("INSERT INTO addr (person_id,addrline1) VALUES ($1,$2)") @@ -523,6 +531,124 @@ return SqlClientHelper.inTransactionUni(client, tx -> tx .onItem().ignore().andContinueWithNull()); ---- +== Multiple Datasources + +The reactive SQL clients support defining several datasources. 
+ +A typical configuration with several datasources would look like: + +[source,properties] +---- +quarkus.datasource.db-kind=postgresql <1> +quarkus.datasource.username=user-default +quarkus.datasource.password=password-default +quarkus.datasource.reactive.url=postgresql://localhost:5432/default + +quarkus.datasource."additional1".db-kind=postgresql <2> +quarkus.datasource."additional1".username=user-additional1 +quarkus.datasource."additional1".password=password-additional1 +quarkus.datasource."additional1".reactive.url=postgresql://localhost:5432/additional1 + +quarkus.datasource."additional2".db-kind=mysql <3> +quarkus.datasource."additional2".username=user-additional2 +quarkus.datasource."additional2".password=password-additional2 +quarkus.datasource."additional2".reactive.url=mysql://localhost:3306/additional2 +---- +<1> The default datasource - using PostgreSQL. +<2> A named datasource called `additional1` - using PostgreSQL. +<3> A named datasource called `additional2` - using MySQL. + +You can then inject the clients as follows: + +[source,java] +---- +@Inject <1> +PgPool defaultClient; + +@Inject +@ReactiveDataSource("additional1") <2> +PgPool additional1Client; + +@Inject +@ReactiveDataSource("additional2") +MySQLPool additional2Client; +---- +<1> Injecting the client for the default datasource does not require anything special. +<2> For a named datasource, you use the `@ReactiveDataSource` CDI qualifier with the datasource name as its value. + +== UNIX Domain Socket connections + +The PostgreSQL and MariaDB/MySQL clients can be configured to connect to the server through a UNIX domain socket. + +First make sure that link:vertx#native-transport[native transport support] is enabled. + +Then configure the database connection url. +This step depends on the database type. + +=== PostgreSQL + +PostgresSQL domain socket paths have the following form: `/.s.PGSQL.` + +The database connection url must be configured so that: + +* the `host` is the `directory` in the socket path +* the `port` is the `port` in the socket path + +Consider the following socket path: `/var/run/postgresql/.s.PGSQL.5432`. + +In `application.properties` add: + +[source,properties] +---- +quarkus.datasource.reactive.url=postgresql://:5432/quarkus_test?host=/var/run/postgresql +---- + +=== MariaDB/MySQL + +The database connection url must be configured so that the `host` is the socket path. + +Consider the following socket path: `/var/run/mysqld/mysqld.sock`. + +In `application.properties` add: + +[source,properties] +---- +quarkus.datasource.reactive.url=mysql:///quarkus_test?host=/var/run/mysqld/mysqld.sock +---- + +== TCP Connection `idle-timeout` + +Reactive datasources can be configured with an `idle-timeout` (in milliseconds) that applies at the TCP connection level. + +NOTE: The `idle-timeout` is disabled by default. + +For example, you could expire idle connections after 60 minutes: + +[source,properties] +---- +quarkus.datasource.reactive.idle-timeout=PT60M +---- + +[WARNING] +==== +This timeout does not apply to the pool, but to the TCP connection. + +If it expires while the connection is idle in the pool, the connection is closed and removed from the pool. + +If it expires immediately after your borrowed the connection from the pool and before you execute a query, you will get an error. 
+ + +While this is unlikely to happen, you must be prepared for such eventuality: + +[source,java] +---- +// Run a query, retry at most 3 times if it fails +Uni> rowSet = client.query("SELECT id, name FROM fruits ORDER BY name ASC") + .execute() + .onFailure().retry().atMost(3); +---- +==== + == Configuration Reference === Common Datasource diff --git a/docs/src/main/asciidoc/redis.adoc b/docs/src/main/asciidoc/redis.adoc index 4d33438cc4964..4476ce8163317 100644 --- a/docs/src/main/asciidoc/redis.adoc +++ b/docs/src/main/asciidoc/redis.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using the Redis Client :extension-status: preview @@ -43,12 +43,13 @@ The solution is located in the `redis-quickstart` {quickstarts-tree-url}/redis-q First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=redis-quickstart \ - -Dextensions="redis-client, resteasy-jsonb, resteasy-mutiny" + -Dextensions="redis-client,resteasy-jackson,resteasy-mutiny" \ + -DnoExamples cd redis-quickstart ---- @@ -88,9 +89,9 @@ docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name red Once we have the Redis server running, we need to configure the Redis connection properties. This is done in the `application.properties` configuration file. Edit it to the following content: -[source] +[source,properties] ---- -quarkus.redis.hosts=localhost:6379 <1> +quarkus.redis.hosts=redis://localhost:6379 <1> ---- 1. Configure Redis hosts to connect to. Here we connect to the Redis server we started in the previous section @@ -200,19 +201,14 @@ import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.PathParam; import javax.ws.rs.PUT; -import javax.ws.rs.Consumes; -import javax.ws.rs.Produces; import javax.ws.rs.Path; import javax.ws.rs.POST; import javax.ws.rs.DELETE; -import javax.ws.rs.core.MediaType; import java.util.List; import io.smallrye.mutiny.Uni; @Path("/increments") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class IncrementResource { @Inject @@ -394,7 +390,7 @@ public class IncrementResourceTest { If you followed the instructions, you should have the Redis server running. Then, you just need to run the application using: -[source, shell] +[source,bash] ---- ./mvnw quarkus:dev ---- @@ -409,7 +405,7 @@ a key. 
=== Creating a new increment -[source, shell] +[source,bash] ---- curl -X POST -H "Content-Type: application/json" -d '{"key":"first","value":10}' http://localhost:8080/increments <1> ---- @@ -430,7 +426,7 @@ Running the above command should return the result below: To see the list of current increments keys, run the following command: -[source, shell] +[source,bash] ---- curl http://localhost:8080/increments ---- @@ -441,7 +437,7 @@ The above command should return `["first"]` indicating that we have only one in To retrieve an increment using its key, we will have to run the below command: -[source, shell] +[source,bash] ---- curl http://localhost:8080/increments/first <1> ---- @@ -460,7 +456,7 @@ curl http://localhost:8080/increments/first <1> To increment a value, run the following command: -[source, shell] +[source,bash] ---- curl -X PUT -H "Content-Type: application/json" -d '27' http://localhost:8080/increments/first <1> ---- @@ -483,7 +479,7 @@ Now, running the command `curl http://localhost:8080/increments/first` should re Use the command below, to delete an increment given its key. -[source, shell] +[source,bash] ---- curl -X DELETE http://localhost:8080/increments/first <1> ---- @@ -498,7 +494,7 @@ You can run the application as a conventional jar file. First, we will need to package it: -[source, shell] +[source,bash] ---- ./mvnw package ---- @@ -507,9 +503,9 @@ NOTE: This command will start a Redis instance to execute the tests. Thus your R Then run it: -[source, shell] +[source,bash] ---- -java -jar ./target/redis-quickstart-1.0-SNAPSHOT-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- == Running Native @@ -523,16 +519,16 @@ Compiling a native executable takes a bit longer, as GraalVM performs additional steps to remove unnecessary codepaths. Use the `native` profile to compile a native executable: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative ---- Once the build is finished, you can run the executable with: -[source, shell] +[source,bash] ---- -./target/redis-quickstart-1.0-SNAPSHOT-runner +./target/redis-quickstart-1.0.0-SNAPSHOT-runner ---- == Connection Health Check @@ -540,10 +536,44 @@ Once the build is finished, you can run the executable with: If you are using the `quarkus-smallrye-health` extension, `quarkus-vertx-redis` will automatically add a readiness health check to validate the connection to the Redis server. -So when you access the `/health/ready` endpoint of your application you will have information about the connection validation status. +So when you access the `/q/health/ready` endpoint of your application you will have information about the connection validation status. This behavior can be disabled by setting the `quarkus.redis.health.enabled` property to `false` in your `application.properties`. +== Multiple Redis Clients + +The Redis extension allows you to configure multiple clients. +Using several clients works the same way as having a single client. + +[source,properties] +---- +quarkus.redis.hosts=redis://localhost:6379 +quarkus.redis.second.hosts=redis://localhost:6379 +---- + +Notice there's an extra bit in the key (the `second` segment). +The syntax is as follows: `quarkus.redis.[optional name.][redis configuration property]`. +If the name is omitted, it configures the default client. + +=== Named Redis client Injection + +When using multiple clients, you can select the client to inject using the `io.quarkus.redis.client.RedisClientName` qualifier. 
+Using the above properties to configure three different clients, you can also inject each one as follows: + +[source,java,indent=0] +---- +@Inject +RedisClient defaultRedisClient; + +@Inject +@RedisClientName("second") +RedisClient redisClient2; + +@Inject +@RedisClientName("second") +ReactiveRedisClient reactiveClient2; +---- + == Configuration Reference include::{generated-dir}/config/quarkus-redis-client.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/rest-client-multipart.adoc b/docs/src/main/asciidoc/rest-client-multipart.adoc index 42341ae3d8185..f84ff56f4de4c 100644 --- a/docs/src/main/asciidoc/rest-client-multipart.adoc +++ b/docs/src/main/asciidoc/rest-client-multipart.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using the REST Client with Multipart @@ -11,7 +11,7 @@ RESTEasy has rich support for the `multipart/*` and `multipart/form-data` mime t This guide explains how to use the MicroProfile REST Client with Multipart in order to interact with REST APIs -requiring multipart/form-data content-type with very little effort. +requiring `multipart/form-data` content-type with very little effort. == Prerequisites @@ -35,37 +35,28 @@ The solution is located in the `rest-client-multipart-quickstart` {quickstarts-t First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=rest-client-multipart-quickstart \ -DclassName="org.acme.rest.client.multipart.MultipartClientResource" \ -Dpath="/client" \ - -Dextensions="rest-client, resteasy" + -Dextensions="rest-client, resteasy, resteasy-multipart" cd rest-client-multipart-quickstart ---- This command generates the Maven project with a REST endpoint and imports the `rest-client` and `resteasy` extensions. - -Then, we'll add the following dependency to support `multipart/form-data` requests: - -[source,xml] ----- - - org.jboss.resteasy - resteasy-multipart-provider - ----- +It also adds the `resteasy-multipart` extension to support `multipart/form-data` requests. == Setting up the model -In this guide we will be demonstrating how to invoke a REST service accepting multipart/form-data input. +In this guide we will be demonstrating how to invoke a REST service accepting `multipart/form-data` input. We are assuming the payload is well-known before the request is sent, so we can model as a POJO. [NOTE] ==== -If the payload is unknown, you can also use the RESTEasy custom API instead. If that's the case, see the RESTEasy Multipart Providers link in the end of the guide. +If the payload is unknown, you can also use the RESTEasy custom API instead. If that's the case, see the RESTEasy Multipart Providers link at the end of the guide. ==== Our first order of business is to setup the model we will be using to define the `multipart/form-data` payload, in the form of a `MultipartBody` POJO. @@ -156,7 +147,7 @@ It will allow to narrow down the number of JAX-RS providers (which can be seen a In order to determine the base URL to which REST calls will be made, the REST Client uses configuration from `application.properties`. 
The name of the property needs to follow a certain convention which is best displayed in the following code: -[source,shell] +[source,properties] ---- # Your configuration properties org.acme.rest.client.multipart.MultipartService/mp-rest/url=http://localhost:8080/ @@ -274,7 +265,7 @@ The code above uses link:http://rest-assured.io/[REST Assured] to assert that th Because the test runs in a different port, we also need to include an `application.properties` in our `src/test/resources` with the following content: -[source,shell] +[source,properties] ---- # Your configuration properties org.acme.rest.client.multipart.MultipartService/mp-rest/url=http://localhost:8081/ @@ -287,7 +278,7 @@ org.acme.rest.client.multipart.MultipartService/mp-rest/url=http://localhost:808 You should see an output similar to: -[source,shell] +[source,text] ---- --89d288bd-960f-460c-b266-64c5b4d170fa Content-Disposition: form-data; name="fileName" @@ -302,10 +293,10 @@ HELLO WORLD --89d288bd-960f-460c-b266-64c5b4d170fa-- ---- -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. == Further reading - * link:https://download.eclipse.org/microprofile/microprofile-rest-client-1.2.1/microprofile-rest-client-1.2.1.html[MicroProfile Rest Client specification] - * link:https://docs.jboss.org/resteasy/docs/4.3.1.Final/userguide/html/Multipart.html[RESTEasy Multipart Providers] + * link:https://download.eclipse.org/microprofile/microprofile-rest-client-1.4.1/microprofile-rest-client-1.4.1.html[MicroProfile Rest Client specification] + * link:https://docs.jboss.org/resteasy/docs/4.5.6.Final/userguide/html/Multipart.html[RESTEasy Multipart Provider] diff --git a/docs/src/main/asciidoc/rest-client.adoc b/docs/src/main/asciidoc/rest-client.adoc index 5cb611e2b2fdf..b16d5a665632e 100644 --- a/docs/src/main/asciidoc/rest-client.adoc +++ b/docs/src/main/asciidoc/rest-client.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using the REST Client @@ -34,30 +34,28 @@ The solution is located in the `rest-client-quickstart` {quickstarts-tree-url}/r First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=rest-client-quickstart \ -DclassName="org.acme.rest.client.CountriesResource" \ -Dpath="/country" \ - -Dextensions="rest-client, resteasy-jsonb" + -Dextensions="resteasy,resteasy-jackson,rest-client,rest-client-jackson" cd rest-client-quickstart ---- -This command generates the Maven project with a REST endpoint and imports the `rest-client` and `resteasy-jsonb` extensions. +This command generates the Maven project with a REST endpoint and imports: -[NOTE] -==== -If your application does not expose any JAX-RS endpoints (eg. a link:command-mode-reference[command mode application]), use the `rest-client-jsonb` or the `rest-client-jackson` extension instead. 
-==== +* the `resteasy` and `resteasy-jackson` extensions for the REST server support; +* the `rest-client` and `rest-client-jackson` extensions for the REST client support. -If you already have your Quarkus project configured, you can add the `rest-client` and the `rest-client-jsonb` extensions +If you already have your Quarkus project configured, you can add the `rest-client` and the `rest-client-jackson` extensions to your project by running the following command in your project base directory: [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="rest-client,resteasy-jsonb" +./mvnw quarkus:add-extension -Dextensions="rest-client,rest-client-jackson" ---- This will add the following to your `pom.xml`: @@ -70,7 +68,7 @@ This will add the following to your `pom.xml`: io.quarkus - quarkus-resteasy-jsonb + quarkus-rest-client-jackson ---- @@ -87,6 +85,9 @@ package org.acme.rest.client; import java.util.List; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties(ignoreUnknown = true) public class Country { public String name; @@ -102,7 +103,8 @@ public class Country { } ---- -The model above is only a subset of the fields provided by the service, but it suffices for the purposes of this guide. +The model above is only a subset of the fields provided by the service (thus the `@JsonIgnoreProperties` annotation), +but it suffices for the purposes of this guide. == Create the interface @@ -126,7 +128,6 @@ public interface CountriesService { @GET @Path("/name/{name}") - @Produces("application/json") Set getByName(@PathParam String name); } ---- @@ -142,18 +143,52 @@ CDI injection as a REST Client [NOTE] ==== -While `@Consumes` and `@Produces` are optional as auto-negotiation is supported, -it is heavily recommended to annotate your endpoints with them to define precisely the expected content-types. +When a JSON extension is installed such as `quarkus-rest-client-jackson` or `quarkus-rest-client-jsonb`, Quarkus will use the `application/json` media type +by default for most return values, unless the media type is explicitly set via +`@Produces` or `@Consumes` annotations (there are some exceptions for well known types, such as `String` and `File`, which default to `text/plain` and `application/octet-stream` +respectively). + +If you don't want JSON by default you can set `quarkus.resteasy-json.default-json=false` and the default will change back to being auto-negotiated. If you set this +you will need to add `@Produces(MediaType.APPLICATION_JSON)` and `@Consumes(MediaType.APPLICATION_JSON)` to your endpoints in order to use JSON. +If you don't rely on the JSON default, it is heavily recommended to annotate your endpoints with the `@Produces` and `@Consumes` annotations to define precisely the expected content-types. It will allow to narrow down the number of JAX-RS providers (which can be seen as converters) included in the native executable. ==== +=== Query Parameters + +If the GET request requires query parameters you can leverage the `@QueryParam("parameter-name")` annotation instead of (or in addition to) the `@PathParam`. Path and query parameters can be combined, as required, as illustrated in a mock example below. 
+ +[source, java] +---- +package org.acme.rest.client; + +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import org.jboss.resteasy.annotations.jaxrs.PathParam; +import org.jboss.resteasy.annotations.jaxrs.QueryParam; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import java.util.Set; + +@Path("/v2") +@RegisterRestClient +public interface CountriesService { + + @GET + @Path("/name/{region}") + Set getByRegion(@PathParam String region, @QueryParam("city") String city); +} +---- + + == Create the configuration In order to determine the base URL to which REST calls will be made, the REST Client uses configuration from `application.properties`. The name of the property needs to follow a certain convention which is best displayed in the following code: -[source,shell] +[source,properties] ---- # Your configuration properties org.acme.rest.client.CountriesService/mp-rest/url=https://restcountries.eu/rest # // <1> @@ -185,6 +220,16 @@ country-api/mp-rest/url=https://restcountries.eu/rest # country-api/mp-rest/scope=javax.inject.Singleton # / ---- + +=== Disabling Hostname Verification + +To disable the SSL hostname verification for a specific REST client, add the following property to your configuration: + +[source,properties] +---- +country-api/mp-rest/hostnameVerifier=io.quarkus.restclient.NoopHostnameVerifier +---- + == Update the JAX-RS resource Open the `src/main/java/org/acme/rest/client/CountriesResource.java` file and update it with the following content: @@ -211,7 +256,6 @@ public class CountriesResource { @GET @Path("/name/{name}") - @Produces(MediaType.APPLICATION_JSON) public Set name(@PathParam String name) { return countriesService.getByName(name); } @@ -261,7 +305,7 @@ The code above uses link:http://rest-assured.io/[REST Assured]'s link:https://gi == Async Support The rest client supports asynchronous rest calls. -Async support comes in 2 flavors: you can return a `CompletionStage` or a `Uni` (requires the `quarkus-resteasy-mutiny` extension). +Async support comes in 2 flavors: you can return a `CompletionStage` or a `Uni` (requires the `quarkus-rest-client-mutiny` extension). Let's see it in action by adding a `getByNameAsync` method in our `CountriesService` REST interface. The code should look like: [source, java] @@ -320,14 +364,12 @@ public class CountriesResource { @GET @Path("/name/{name}") - @Produces(MediaType.APPLICATION_JSON) public Set name(@PathParam String name) { return countriesService.getByName(name); } @GET @Path("/name-async/{name}") - @Produces(MediaType.APPLICATION_JSON) public CompletionStage> nameAsync(@PathParam String name) { return countriesService.getByNameAsync(name); } @@ -409,7 +451,6 @@ public class CountriesResource { @GET @Path("/name-uni/{name}") - @Produces(MediaType.APPLICATION_JSON) public Uni> nameAsync(@PathParam String name) { return countriesService.getByNameAsUni(name); } @@ -441,6 +482,68 @@ If you use a `CompletionStage`, you would need to call the service's method to r This difference comes from the laziness aspect of Mutiny and its subscription protocol. More details about this can be found in https://smallrye.io/smallrye-mutiny/#_uni_and_multi[the Mutiny documentation]. +== Custom headers support +The MicroProfile REST client allows amending request headers by registering a `ClientHeadersFactory` with the `@RegisterClientHeaders` annotation. 
+ +Let's see it in action by adding a `@RegisterClientHeaders` annotation pointing to a `RequestUUIDHeaderFactory` class in our `CountriesService` REST interface: + +[source, java] +---- +package org.acme.rest.client; + +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import org.jboss.resteasy.annotations.jaxrs.PathParam; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import java.util.concurrent.CompletionStage; +import java.util.Set; + +@Path("/v2") +@RegisterRestClient +@RegisterClientHeaders(RequestUUIDHeaderFactory.class) +public interface CountriesService { + + @GET + @Path("/name/{name}") + @Produces("application/json") + Set getByName(@PathParam String name); + + @GET + @Path("/name/{name}") + @Produces("application/json") + CompletionStage> getByNameAsync(@PathParam String name); +} +---- + +And the `RequestUUIDHeaderFactory` would look like: + +[source, java] +---- +package org.acme.rest.client; + +import org.eclipse.microprofile.rest.client.ext.ClientHeadersFactory; +import org.jboss.resteasy.specimpl.MultivaluedMapImpl; + +import javax.enterprise.context.ApplicationScoped; +import javax.ws.rs.core.MultivaluedMap; +import java.util.UUID; + +@ApplicationScoped +public class RequestUUIDHeaderFactory implements ClientHeadersFactory { + + @Override + public MultivaluedMap update(MultivaluedMap incomingHeaders, MultivaluedMap clientOutgoingHeaders) { + MultivaluedMap result = new MultivaluedMapImpl<>(); + result.add("X-request-uuid", UUID.randomUUID().toString()); + return result; + } +} +---- + +Any CDI bean matching the declared factory class will be picked up and can of course benefit from the usual CDI infrastructure. + == Package and run the application Run the application with: `./mvnw compile quarkus:dev`. @@ -448,9 +551,22 @@ Open your browser to http://localhost:8080/country/name/greece. You should see a JSON object containing some basic information about Greece. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. +== REST Client and RESTEasy interactions + +In Quarkus, the REST Client extension and link:rest-json[the RESTEasy extension] share the same infrastructure. +One important consequence of this consideration is that they share the same list of providers (in the JAX-RS meaning of the word). + +For instance, if you declare a `WriterInterceptor`, it will by default intercept both the servers calls and the client calls, +which might not be the desired behavior. + +However, you can change this default behavior and constrain a provider to: + +* only consider *client* calls by adding the `@ConstrainedTo(RuntimeType.CLIENT)` annotation to your provider; +* only consider *server* calls by adding the `@ConstrainedTo(RuntimeType.SERVER)` annotation to your provider. + == Using a Mock HTTP Server for tests Setting up a mock HTTP server, against which tests are run, is a common testing pattern. 
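+One common way to do this (not covered by this guide, so treat it as a sketch) is to start http://wiremock.org/[WireMock] from a `io.quarkus.test.common.QuarkusTestResourceLifecycleManager` and point the REST Client at it through the `mp-rest/url` property shown earlier. The class and stub names below are illustrative and assume the WireMock dependency (`com.github.tomakehurst:wiremock-jre8`) is on the test classpath:
+
+[source,java]
+----
+package org.acme.rest.client;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
+import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
+
+import java.util.Collections;
+import java.util.Map;
+
+import com.github.tomakehurst.wiremock.WireMockServer;
+
+import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;
+
+public class WireMockCountries implements QuarkusTestResourceLifecycleManager {
+
+    private WireMockServer wireMockServer;
+
+    @Override
+    public Map<String, String> start() {
+        // Start a mock HTTP server on a random port and stub the remote endpoint
+        wireMockServer = new WireMockServer(options().dynamicPort());
+        wireMockServer.start();
+        wireMockServer.stubFor(get(urlEqualTo("/v2/name/greece"))
+                .willReturn(aResponse()
+                        .withHeader("Content-Type", "application/json")
+                        .withBody("[{\"name\":\"Greece\",\"capital\":\"Athens\"}]")));
+
+        // Override the base URL of the CountriesService for the duration of the tests
+        return Collections.singletonMap(
+                "org.acme.rest.client.CountriesService/mp-rest/url", wireMockServer.baseUrl());
+    }
+
+    @Override
+    public void stop() {
+        if (wireMockServer != null) {
+            wireMockServer.stop();
+        }
+    }
+}
+----
+
+A test class would then enable this resource with `@QuarkusTestResource(WireMockCountries.class)`, so the stub is started before the tests run and stopped afterwards.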
diff --git a/docs/src/main/asciidoc/rest-data-panache.adoc b/docs/src/main/asciidoc/rest-data-panache.adoc index ce279c9fa70e6..7b9fface7e183 100644 --- a/docs/src/main/asciidoc/rest-data-panache.adoc +++ b/docs/src/main/asciidoc/rest-data-panache.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Generating JAX-RS resources with Panache @@ -12,13 +12,13 @@ A lot of web applications are monotonous CRUD applications with REST APIs that a To streamline this task, REST Data with Panache extension can generate the basic CRUD endpoints for your entities and repositories. While this extension is still experimental and provides a limited feature set, we hope to get an early feedback for it. -Currently this extension supports Hibernate ORM with Panache and can generate CRUD resources that work with `application/json` and `application/hal+json` content. +Currently this extension supports Hibernate ORM and MongoDB with Panache and can generate CRUD resources that work with `application/json` and `application/hal+json` content. include::./status-include.adoc[] == Setting up REST Data with Panache -To begin with: +=== Hibernate ORM * Add the required dependencies to your `pom.xml` ** Hibernate ORM REST Data with Panache extension (`quarkus-hibernate-orm-rest-data-panache`) @@ -44,12 +44,35 @@ To begin with: ---- * Implement the Panache entities and/or repositories as explained in the link:hibernate-orm-panache[Hibernate ORM with Panache guide]. -* Define the interfaces for generation as explained in the sections below. +* Define the interfaces for generation as explained in the resource generation section. + +=== MongoDB + +* Add the required dependencies to your `pom.xml` +** MongoDB REST Data with Panache extension (`quarkus-mongodb-rest-data-panache`) +** One of the RESTEasy JSON serialization extensions (`quarkus-resteasy-jackson` or `quarkus-resteasy-jsonb`) + +[source,xml] +---- + + + io.quarkus + quarkus-mongodb-rest-data-panache + + + io.quarkus + quarkus-resteasy-jackson + + +---- + +* Implement the Panache entities and/or repositories as explained in the link:mongodb-panache[MongoDB with Panache guide]. +* Define the interfaces for generation as explained in the resource generation section. == Generating resources REST Data with Panache generates JAX-RS resources based on the interfaces available in your application. -For each entity and repository that you want to generate, provide an interface that extends either `PanacheEntityResource` or `PanacheRepositoryResource` interface. +For each entity and repository that you want to generate, provide a resource interface. _Do not implement these interfaces and don't provide custom methods because they will be ignored._ You can, however, override the methods from the extended interface in order to customize them (see the section at the end). === PanacheEntityResource @@ -72,52 +95,92 @@ public interface PeopleResource extends PanacheRepositoryResource { +} +---- + +=== PanacheMongoRepositoryResource + +If your application has a simple entity (e.g. `Person`) and a repository (e.g. 
`PersonRepository`) that implements either `PanacheMongoRepository` or `PanacheMongoRepositoryBase` interface, you could instruct REST Data with Panache to generate its JAX-RS resource with the following interface: + +[source,java] +---- +public interface PeopleResource extends PanacheMongoRepositoryResource { +} +---- + === The generated resource The generated resources will be functionally equivalent for both entities and repositories. -The only difference being the particular data access pattern in use. +The only difference being the particular data access pattern and data storage in use. -If you have defined one of the `PeopleResource` interfaces mentioned above, REST Data with Panache will generate a JAX-RS resource similar to this: +If you have defined one of the `PeopleResource` interfaces mentioned above, this extension will generate its implementation using a particular data access strategy. +The implemented class then will be used by a generated JAX-RS resource, which will look like this: [source,java] ---- -@Path("/people") // Default path is a hyphenated lowercase resource name without a suffix of `resource` or `controller`. -public class PeopleResourceImpl implements PeopleResource { // The actual class name is going to be unique +public class PeopleResourceJaxRs { // The actual class name is going to be unique + @Inject + PeopleResource resource; + @GET - @Produces("application/json") @Path("{id}") + @Produces("application/json") public Person get(@PathParam("id") Long id){ - // ... + Person person = resource.get(id); + if (person == null) { + throw new WebApplicationException(404); + } + return person; } @GET @Produces("application/json") - public Response list(){ - // ... + public Response list(@QueryParam("sort") List sortQuery, + @QueryParam("page") @DefaultValue("0") int pageIndex, + @QueryParam("size") @DefaultValue("20") int pageSize) { + Page page = Page.of(pageIndex, pageSize); + Sort sort = getSortFromQuery(sortQuery); + List people = resource.list(page, sort); + // ... build a response with page links and return a 200 response with a list } @Transactional @POST @Consumes("application/json") @Produces("application/json") - public Response add(Person entity) { - // .. + public Response add(Person personToSave) { + Person person = resource.add(person); + // ... build a new location URL and return 201 response with an entity } @Transactional @PUT + @Path("{id}") @Consumes("application/json") @Produces("application/json") - @Path("{id}") - public Response update(@PathParam("id") Long id, Person person) { - // .. + public Response update(@PathParam("id") Long id, Person personToSave) { + if (resource.get(id) == null) { + Person person = resource.update(id, personToSave); + return Response.status(204).build(); + } + Person person = resource.update(id, personToSave); + // ... build a new location URL and return 201 response with an entity } @Transactional @DELETE @Path("{id}") public void delete(@PathParam("id") Long id) { - // .. 
+ if (!resource.delete(id)) { + throw new WebApplicationException(404); + } } } ---- @@ -132,10 +195,10 @@ It can be used in your resource interface: @ResourceProperties(hal = true, path = "my-people") public interface PeopleResource extends PanacheEntityResource { @MethodProperties(path = "all") - Response list(); + List list(Page page, Sort sort); @MethodProperties(exposed = false) - void delete(Long id); + boolean delete(Long id); } ---- @@ -143,19 +206,33 @@ public interface PeopleResource extends PanacheEntityResource { `@ResourceProperties` -* `hal` - in addition to the standard `application/json` responses, generates additional methods that can return `application/hal+json` responses if requested via an `Accept` header. -Default is `false`. +* `exposed` - whether resource could be exposed. A global resource property that can be overridden for each method. Default is `true`. * `path` - resource base path. Default path is a hyphenated lowercase resource name without a suffix of `resource` or `controller`. * `paged` - whether collection responses should be paged or not. First, last, previous and next page URIs are included in the response headers if they exist. Request page index and size are taken from the `page` and `size` query parameters that default to `0` and `20` respectively. Default is `true`. +* `hal` - in addition to the standard `application/json` responses, generates additional methods that can return `application/hal+json` responses if requested via an `Accept` header. +Default is `false`. +* `halCollectionName` - name that should be used when generating a hal collection response. Default name is a hyphenated lowercase resource name without a suffix of `resource` or `controller`. `@MethodProperties` * `exposed` - does not expose a particular HTTP verb when set to `false`. Default is `true`. * `path` - operation path (this is appended to the resource base path). Default is an empty string. +== Query parameters + +REST Data with Panache supports the following query parameters with the generated resources. + +* `page` - a page number which should be returned by a list operation. +It applies to the paged resources only and is a number starting with 0. Default is 0. +* `size` - a page size which should be returned by a list operation. +It applies to the paged resources only and is a number starting with 1. Default is 20. +* `sort` - a comma separated list of fields which should be used for sorting a result of a list operation. +Fields are sorted in the ascending order unless they're prefixed with a `-`. +E.g. `?sort=name,-age` will sort the result by the name ascending by the age descending. + == Response body examples As mentioned above REST Data with Panache supports the `application/json` and `application/hal+json` response content types. 
@@ -183,11 +260,21 @@ Here are a couple of examples of how a response body would look like for the `ge "name": "John Johnson", "birth": "1988-01-10", "_links": { - "self": "http://example.com/people/1", - "remove": "http://example.com/people/1", - "update": "http://example.com/people/1", - "add": "http://example.com/people", - "list": "http://example.com/people" + "self": { + "href": "http://example.com/people/1" + }, + "remove": { + "href": "http://example.com/people/1" + }, + "update": { + "href": "http://example.com/people/1" + }, + "add": { + "href": "http://example.com/people" + }, + "list": { + "href": "http://example.com/people" + } } } ---- @@ -224,11 +311,21 @@ Here are a couple of examples of how a response body would look like for the `ge "name": "John Johnson", "birth": "1988-01-10", "_links": { - "self": "http://example.com/people/1", - "remove": "http://example.com/people/1", - "update": "http://example.com/people/1", - "add": "http://example.com/people", - "list": "http://example.com/people" + "self": { + "href": "http://example.com/people/1" + }, + "remove": { + "href": "http://example.com/people/1" + }, + "update": { + "href": "http://example.com/people/1" + }, + "add": { + "href": "http://example.com/people" + }, + "list": { + "href": "http://example.com/people" + } } }, { @@ -236,20 +333,40 @@ Here are a couple of examples of how a response body would look like for the `ge "name": "Peter Peterson", "birth": "1986-11-20", "_links": { - "self": "http://example.com/people/2", - "remove": "http://example.com/people/2", - "update": "http://example.com/people/2", - "add": "http://example.com/people", - "list": "http://example.com/people" + "self": { + "href": "http://example.com/people/2" + }, + "remove": { + "href": "http://example.com/people/2" + }, + "update": { + "href": "http://example.com/people/2" + }, + "add": { + "href": "http://example.com/people" + }, + "list": { + "href": "http://example.com/people" + } } } ], "_links": { - "add": "http://example.com/people", - "list": "http://example.com/people", - "first": "http://example.com/people?page=0&size=2", - "last": "http://example.com/people?page=2&size=2", - "next": "http://example.com/people?page=1&size=2" + "add": { + "href": "http://example.com/people" + }, + "list": { + "href": "http://example.com/people" + }, + "first": { + "href": "http://example.com/people?page=0&size=2" + }, + "last": { + "href": "http://example.com/people?page=2&size=2" + }, + "next": { + "href": "http://example.com/people?page=1&size=2" + } } } ---- diff --git a/docs/src/main/asciidoc/rest-json.adoc b/docs/src/main/asciidoc/rest-json.adoc index 423afc4905304..64f81c91bc295 100644 --- a/docs/src/main/asciidoc/rest-json.adoc +++ b/docs/src/main/asciidoc/rest-json.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Writing JSON REST Services @@ -41,54 +41,57 @@ The solution is located in the `rest-json-quickstart` {quickstarts-tree-url}/res First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=rest-json-quickstart \ -DclassName="org.acme.rest.json.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jsonb" + -Dextensions="resteasy,resteasy-jackson" cd rest-json-quickstart ---- -This command generates a Maven structure importing the RESTEasy/JAX-RS and http://json-b.net/[JSON-B] extensions, +This command generates a Maven structure importing the RESTEasy/JAX-RS and https://github.com/FasterXML/jackson[Jackson] extensions, and in particular adds the following dependency: [source,xml] ---- io.quarkus - quarkus-resteasy-jsonb + quarkus-resteasy-jackson ---- -Quarkus also supports https://github.com/FasterXML/jackson[Jackson] so, if you prefer Jackson over JSON-B, you can create a project relying on the RESTEasy Jackson extension instead: +[NOTE] +==== +To improve user experience, Quarkus registers the three Jackson https://github.com/FasterXML/jackson-modules-java8[Java 8 modules] so you don't need to do it manually. +==== + +Quarkus also supports http://json-b.net/[JSON-B] so, if you prefer JSON-B over Jackson, you can create a project relying on the RESTEasy JSON-B extension instead: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=rest-json-quickstart \ -DclassName="org.acme.rest.json.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jackson" + -Dextensions="resteasy-jsonb" cd rest-json-quickstart ---- -This command generates a Maven structure importing the RESTEasy/JAX-RS and https://github.com/FasterXML/jackson[Jackson] extensions, +This command generates a Maven structure importing the RESTEasy/JAX-RS and http://json-b.net/[JSON-B] extensions, and in particular adds the following dependency: [source,xml] ---- io.quarkus - quarkus-resteasy-jackson + quarkus-resteasy-jsonb ---- -To improve user experience, Quarkus registers the three Jackson https://github.com/FasterXML/jackson-modules-java8[Java 8 modules] so you don't need to do it manually. - == Creating your first JSON REST service In this example, we will create an application to manage a list of fruits. @@ -126,17 +129,12 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.Set; -import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; @Path("/fruits") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public class FruitResource { private Set fruits = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>())); @@ -172,17 +170,90 @@ depending on the extension you chose when initializing the project. [NOTE] ==== -While RESTEasy supports auto-negotiation, when using Quarkus, it is very important to define the `@Produces` and `@Consumes` annotations. -They are analyzed at build time and Quarkus restricts the number of JAX-RS providers included in the native executable to the minimum required by the application. -It allows to reduce the size of the native executable. 
+When a JSON extension is installed such as `quarkus-resteasy-jackson` or `quarkus-resteasy-jsonb`, Quarkus will use the `application/json` media type +by default for most return values, unless the media type is explicitly set via +`@Produces` or `@Consumes` annotations (there are some exceptions for well known types, such as `String` and `File`, which default to `text/plain` and `application/octet-stream` +respectively). + +If you don't want JSON by default you can set `quarkus.resteasy-json.default-json=false` and the default will change back to being auto-negotiated. If you set this +you will need to add `@Produces(MediaType.APPLICATION_JSON)` and `@Consumes(MediaType.APPLICATION_JSON)` to your endpoints in order to use JSON. + +If you don't rely on the JSON default, it is heavily recommended to annotate your endpoints with the `@Produces` and `@Consumes` annotations to define precisely the expected content-types. +It will allow to narrow down the number of JAX-RS providers (which can be seen as converters) included in the native executable. ==== === Configuring JSON support +==== Jackson + +In Quarkus, the default Jackson `ObjectMapper` obtained via CDI (and consumed by the Quarkus extensions) is configured to ignore unknown properties +(by disabling the `DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES` feature). + +You can restore the default behavior of Jackson by setting `quarkus.jackson.fail-on-unknown-properties=true` in your `application.properties` +or on a per class basis via `@JsonIgnoreProperties(ignoreUnknown = false)`. + +Furthermore, the `ObjectMapper` is configured to format dates and time in ISO-8601 +(by disabling the `SerializationFeature.WRITE_DATES_AS_TIMESTAMPS` feature). + +The default behaviour of Jackson can be restored by setting `quarkus.jackson.write-dates-as-timestamps=true` +in your `application.properties`. If you want to change the format for a single field, you can use the +`@JsonFormat` annotation. + +Also, Quarkus makes it very easy to configure various Jackson settings via CDI beans. +The simplest (and suggested) approach is to define a CDI bean of type `io.quarkus.jackson.ObjectMapperCustomizer` +inside of which any Jackson configuration can be applied. + +An example where a custom module needs to be registered would look like so: + +[source,java] +---- +import com.fasterxml.jackson.databind.ObjectMapper; +import io.quarkus.jackson.ObjectMapperCustomizer; +import javax.inject.Singleton; + +@Singleton +public class RegisterCustomModuleCustomizer implements ObjectMapperCustomizer { + + public void customize(ObjectMapper mapper) { + mapper.registerModule(new CustomModule()); + } +} +---- + +Users can even provide their own `ObjectMapper` bean if they so choose. +If this is done, it is very important to manually inject and apply all `io.quarkus.jackson.ObjectMapperCustomizer` beans in the CDI producer that produces `ObjectMapper`. +Failure to do so will prevent Jackson specific customizations provided by various extensions from being applied. + +[source,java] +---- +import com.fasterxml.jackson.databind.ObjectMapper; +import io.quarkus.jackson.ObjectMapperCustomizer; + +import javax.enterprise.inject.Instance; +import javax.inject.Singleton; + +public class CustomObjectMapper { + + // Replaces the CDI producer for ObjectMapper built into Quarkus + @Singleton + ObjectMapper objectMapper(Instance customizers) { + ObjectMapper mapper = myObjectMapper(); // Custom `ObjectMapper` + + // Apply all ObjectMapperCustomerizer beans (incl. 
Quarkus) + for (ObjectMapperCustomizer customizer : customizers) { + customizer.customize(mapper); + } + + return mapper; + } +} +---- + ==== JSON-B -Quarkus makes it very easy to configure various JSON-B settings via CDI beans. The simplest (and suggested) approach is to define a CDI bean of type `io.quarkus.jsonb.JsonbConfigCustomizer` -inside of which any JSON-B configuration can be applied. +As stated above, Quarkus provides the option of using JSON-B instead of Jackson via the use of the `quarkus-resteasy-jsonb` extension. + +Following the same approach as described in the previous section, JSON-B can be configured using a `io.quarkus.jsonb.JsonbConfigCustomizer` bean. If for example a custom serializer named `FooSerializer` for type `com.example.Foo` needs to be registered with JSON-B, the addition of a bean like the following would suffice: @@ -206,31 +277,31 @@ A more advanced option would be to directly provide a bean of `javax.json.bind.J If the latter approach is leveraged it is very important to manually inject and apply all `io.quarkus.jsonb.JsonbConfigCustomizer` beans in the CDI producer that produces `javax.json.bind.Jsonb`. Failure to do so will prevent JSON-B specific customizations provided by various extensions from being applied. -==== Jackson +[source,java] +---- +import io.quarkus.jsonb.JsonbConfigCustomizer; -As stated above, Quarkus provides the option of using https://github.com/FasterXML/jackson[Jackson] instead of JSON-B via the use of the `quarkus-resteasy-jackson` extension. +import javax.enterprise.context.Dependent; +import javax.enterprise.inject.Instance; +import javax.json.bind.JsonbConfig; -Following the same approach as described in the previous section, Jackson's `ObjectMapper` can be configured using a `io.quarkus.jackson.ObjectMapperCustomizer` bean. -An example where a custom module needs to be registered would like so: +public class CustomJsonbConfig { -[source,java] ----- -import com.fasterxml.jackson.databind.ObjectMapper; -import io.quarkus.jackson.ObjectMapperCustomizer; -import javax.inject.Singleton; + // Replaces the CDI producer for JsonbConfig built into Quarkus + @Dependent + JsonbConfig jsonConfig(Instance customizers) { + JsonbConfig config = myJsonbConfig(); // Custom `JsonbConfig` -@Singleton -public class RegisterCustomModuleCustomizer implements ObjectMapperCustomizer { + // Apply all JsonbConfigCustomizer beans (incl. Quarkus) + for (JsonbConfigCustomizer customizer : customizers) { + customizer.customize(config); + } - public void customize(ObjectMapper mapper) { - mapper.registerModule(new CustomModule()); + return config; } } ---- -Users can even provide their own `ObjectMapper` bean if they so choose. -If this is done, it is very important to manually inject and apply all `io.quarkus.jackson.ObjectMapperCustomizer` beans in the CDI producer that produces `ObjectMapper`. -Failure to do so will prevent Jackson specific customizations provided by various extensions from being applied. == Creating a frontend @@ -248,7 +319,7 @@ You can now interact with your REST service: You can build a native executable with the usual command `./mvnw package -Pnative`. -Running it is as simple as executing `./target/rest-json-quickstart-1.0-SNAPSHOT-runner`. +Running it is as simple as executing `./target/rest-json-quickstart-1.0.0-SNAPSHOT-runner`. You can then point your browser to `http://localhost:8080/fruits.html` and use your application. 
@@ -266,7 +337,6 @@ When you have the following REST method, Quarkus determines that `Fruit` will be [source,JAVA] ---- @GET -@Produces("application/json") public List list() { // ... } @@ -286,7 +356,6 @@ Your REST method then looks like this: [source,JAVA] ---- @GET -@Produces("application/json") public Response list() { // ... } @@ -367,7 +436,7 @@ Open a browser to http://localhost:8080/legumes.html and you will see our list o The interesting part starts when running the application as a native executable: * create the native executable with `./mvnw package -Pnative`. - * execute it with `./target/rest-json-quickstart-1.0-SNAPSHOT-runner` + * execute it with `./target/rest-json-quickstart-1.0.0-SNAPSHOT-runner` * open a browser and go to http://localhost:8080/legumes.html No legumes there. @@ -398,7 +467,7 @@ Let's do that and follow the same steps as before: * hit `Ctrl+C` to stop the application * create the native executable with `./mvnw package -Pnative`. - * execute it with `./target/rest-json-quickstart-1.0-SNAPSHOT-runner` + * execute it with `./target/rest-json-quickstart-1.0.0-SNAPSHOT-runner` * open a browser and go to http://localhost:8080/legumes.html This time, you can see our list of legumes. @@ -492,7 +561,7 @@ public class LoggingFilter implements ContainerRequestFilter { Now, whenever a REST method is invoked, the request will be logged into the console: -[source,shell] +[source,text] ---- 2019-06-05 12:44:26,526 INFO [org.acm.res.jso.LoggingFilter] (executor-thread-1) Request GET /legumes from IP 127.0.0.1 2019-06-05 12:49:19,623 INFO [org.acm.res.jso.LoggingFilter] (executor-thread-1) Request GET /fruits from IP 0:0:0:0:0:0:0:1 @@ -523,26 +592,69 @@ quarkus.resteasy.gzip.max-input=10M // <2> <2> Configure the upper limit on deflated request body. This is useful to mitigate potential attacks by limiting their reach. The default value is `10M`. This configuration option would recognize strings in this format (shown as a regular expression): `[0-9]+[KkMmGgTtPpEeZzYy]?`. If no suffix is given, assume bytes. +Once GZip support has been enabled you can use it on an endpoint by adding the `@org.jboss.resteasy.annotations.GZIP` annotation to your endpoint method. + +If you want to compress everything then we recommended that you use the `quarkus.http.enable-compression=true` setting instead to globally enable +compression support. + +== Multipart Support + +RESTEasy supports multipart via the https://docs.jboss.org/resteasy/docs/4.5.6.Final/userguide/html/Multipart.html[RESTEasy Multipart Provider]. + +Quarkus provides an extension called `quarkus-resteasy-multipart` to make things easier for you. + +This extension slightly differs from the RESTEasy default behavior as the default charset (if none is specified in your request) is UTF-8 rather than US-ASCII. + +You can configure this behavior with the following configuration properties: + +include::{generated-dir}/config/quarkus-resteasy-multipart.adoc[leveloffset=+1, opts=optional] + == Servlet compatibility In Quarkus, RESTEasy can either run directly on top of the Vert.x HTTP server, or on top of Undertow if you have any servlet dependency. As a result, certain classes, such as `HttpServletRequest` are not always available for injection. Most use-cases for this particular class are covered by JAX-RS equivalents, except for getting the remote client's IP. 
RESTEasy comes with a replacement API which you can inject: -https://docs.jboss.org/resteasy/docs/4.4.2.Final/javadocs/org/jboss/resteasy/spi/HttpRequest.html[`HttpRequest`], which has the methods -https://docs.jboss.org/resteasy/docs/4.4.2.Final/javadocs/org/jboss/resteasy/spi/HttpRequest.html#getRemoteAddress--[`getRemoteAddress()`] -and https://docs.jboss.org/resteasy/docs/4.4.2.Final/javadocs/org/jboss/resteasy/spi/HttpRequest.html#getRemoteHost--[`getRemoteHost()`] +https://docs.jboss.org/resteasy/docs/4.5.6.Final/javadocs/org/jboss/resteasy/spi/HttpRequest.html[`HttpRequest`], which has the methods +https://docs.jboss.org/resteasy/docs/4.5.6.Final/javadocs/org/jboss/resteasy/spi/HttpRequest.html#getRemoteAddress--[`getRemoteAddress()`] +and https://docs.jboss.org/resteasy/docs/4.5.6.Final/javadocs/org/jboss/resteasy/spi/HttpRequest.html#getRemoteHost--[`getRemoteHost()`] to solve this problem. +== RESTEasy and REST Client interactions + +In Quarkus, the RESTEasy extension and link:rest-client[the REST Client extension] share the same infrastructure. +One important consequence of this consideration is that they share the same list of providers (in the JAX-RS meaning of the word). + +For instance, if you declare a `WriterInterceptor`, it will by default intercept both the servers calls and the client calls, +which might not be the desired behavior. + +However, you can change this default behavior and constrain a provider to: + +* only consider *server* calls by adding the `@ConstrainedTo(RuntimeType.SERVER)` annotation to your provider; +* only consider *client* calls by adding the `@ConstrainedTo(RuntimeType.CLIENT)` annotation to your provider. + == What's Different from Jakarta EE Development === No Need for `Application` Class Configuration via an application-supplied subclass of `Application` is supported, but not required. +=== Only a single JAX-RS application + +In contrast to JAX-RS (and RESTeasy) running in a standard servlet-container, Quarkus only supports the deployment of a single JAX-RS application. +If multiple JAX-RS `Application` classes are defined, the build will fail with the message `Multiple classes have been annotated with @ApplicationPath which is currently not supported`. + +If multiple JAX-RS applications are defined, the property `quarkus.resteasy.ignoreApplicationClasses=true` can be used to ignore all explicit `Application` classes. This makes all resource-classes available via the application-path as defined by `quarkus.resteasy.path` (default: `/`). + +=== Support limitations of JAX-RS application + +The RESTEasy extension doesn't support the method `getProperties()` of the class `javax.ws.rs.core.Application`. Moreover, it only relies on the methods `getClasses()` and `getSingletons()` to filter out the annotated resource, provider and feature classes. +It doesn't filter out the built-in resource, provider and feature classes and also the resource, provider and feature classes registered by the other extensions. +Finally the objects returned by the method `getSingletons()` are ignored, only the classes are took into account to filter out the resource, provider and feature classes, in other words the method `getSingletons()` is actually managed the same way as `getClasses()`. + === Lifecycle of Resources -In Quarkus all JAX-RS resources are treated as CDI beans. +In Quarkus all JAX-RS resources are treated as CDI beans. 
It's possible to inject other beans via `@Inject`, bind interceptors using bindings such as `@Transactional`, define `@PostConstruct` callbacks, etc. If there is no scope annotation declared on the resource class then the scope is defaulted. @@ -551,6 +663,28 @@ If set to `true` (default) then a *single instance* of a resource class is creat If set to `false` then a *new instance* of the resource class is created per each request. An explicit CDI scope annotation (`@RequestScoped`, `@ApplicationScoped`, etc.) always overrides the default behavior and specifies the lifecycle of resource instances. +== Include/Exclude JAX-RS classes with build time conditions + +Quarkus enables the inclusion or exclusion of JAX-RS Resources, Providers and Features directly thanks to build time conditions in the same that it does for CDI beans. +Thus, the various JAX-RS classes can be annotated with profile conditions (`@io.quarkus.arc.profile.IfBuildProfile` or `@io.quarkus.arc.profile.UnlessBuildProfile`) and/or with property conditions (`io.quarkus.arc.properties.IfBuildProperty` or `io.quarkus.arc.properties.UnlessBuildProperty`) to indicate to Quarkus at build time under which conditions these JAX-RS classes should be included. + +In the following example, Quarkus includes the endpoint `sayHello` if and only if the build profile `app1` has been enabled. + +[source,java] +---- +@IfBuildProfile("app1") +public class ResourceForApp1Only { + + @GET + @Path("sayHello") + public String sayHello() { + return "hello"; + } +} +---- + +Please note that if a JAX-RS Application has been detected and the method `getClasses()` and/or `getSingletons()` has/have been overridden, Quarkus will ignore the build time conditions and consider only what has been defined in the JAX-RS Application. + == Conclusion Creating JSON REST services with Quarkus is easy as it relies on proven and well known technologies. diff --git a/docs/src/main/asciidoc/resteasy-reactive.adoc b/docs/src/main/asciidoc/resteasy-reactive.adoc new file mode 100644 index 0000000000000..15d7ed851f6ae --- /dev/null +++ b/docs/src/main/asciidoc/resteasy-reactive.adoc @@ -0,0 +1,1830 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Writing REST Services with RESTEasy Reactive + +include::./attributes.adoc[] +:jaxrsapi: https://javadoc.io/doc/javax.ws.rs/javax.ws.rs-api/2.1.1 +:jaxrsspec: /specs/jaxrs/2.1/index.html +:jdkapi: https://docs.oracle.com/en/java/javase/11/docs/api/java.base +:mutinyapi: https://smallrye.io/smallrye-mutiny/apidocs +:httpspec: https://tools.ietf.org/html/rfc7231 +:jsonpapi: https://javadoc.io/doc/javax.json/javax.json-api/1.1.4 +:vertxapi: https://javadoc.io/static/io.vertx/vertx-core/3.9.4 + +This guide explains how to write REST Services with RESTEasy Reactive in Quarkus. + +== What is RESTEasy Reactive? + +RESTEasy Reactive is a new link:{jaxrsspec}[JAX-RS] +implementation written from the ground up to work on our +common https://vertx.io/[Vert.x] layer and is thus fully reactive, while also being very tightly integrated with +Quarkus and consequently moving a lot of work to build time. + +You should be able to use it in place of any JAX-RS implementation, but on top of that it has +great performance for both blocking and non-blocking endpoints, and a lot of new features on top +of what JAX-RS provides. 
+
+== Writing endpoints
+
+=== Getting started
+
+Add the following dependency to your `pom.xml` file:
+
+[source,xml]
+----
+<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-resteasy-reactive</artifactId>
+</dependency>
+----
+
+You can now write your first endpoint in the `org.acme.rest.Endpoint` class:
+
+[source,java]
+----
+package org.acme.rest;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+@Path("")
+public class Endpoint {
+
+    @GET
+    public String hello(){
+        return "Hello, World!";
+    }
+}
+----
+
+=== Terminology
+
+REST:: https://en.wikipedia.org/wiki/Representational_state_transfer[REpresentational State Transfer]
+Endpoint:: Java method which is called to serve a REST call
+URL / URI (Uniform Resource Locator / Identifier):: Used to identify the location of REST resources (https://tools.ietf.org/html/rfc7230#section-2.7[specification])
+Resource:: Represents your domain object. This is what your API serves and modifies. Also called an `entity` in JAX-RS.
+Representation:: How your resource is represented on the wire, can vary depending on content types
+Content type:: Designates a particular representation (also called a media type), for example `text/plain` or `application/json`
+HTTP:: Underlying wire protocol for routing REST calls (see https://tools.ietf.org/html/rfc7230[HTTP specifications])
+HTTP request:: the request part of the HTTP call, consisting of an HTTP method, a target URI, headers and an optional message body
+HTTP response:: the response part of the HTTP call, consisting of an HTTP response status, headers and an optional message body
+
+=== Declaring endpoints: URI mapping
+
+Any class annotated with a link:{jaxrsapi}/javax/ws/rs/Path.html[`@Path`] annotation can have its methods exposed as REST endpoints,
+provided they have an HTTP method annotation (see below).
+
+That link:{jaxrsapi}/javax/ws/rs/Path.html[`@Path`] annotation defines the URI prefix under which those methods will be exposed. It can
+be empty, or contain a prefix such as `rest` or `rest/V1`.
+
+Each exposed endpoint method can in turn have another link:{jaxrsapi}/javax/ws/rs/Path.html[`@Path`] annotation which adds to its containing
+class annotation. For example, this defines a `rest/hello` endpoint:
+
+[source,java]
+----
+package org.acme.rest;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+@Path("rest")
+public class Endpoint {
+
+    @Path("hello")
+    @GET
+    public String hello(){
+        return "Hello, World!";
+    }
+}
+----
+
+See <<uri-parameters>> for more information about URI mapping.
+
+You can set the root path for all rest endpoints using the `@ApplicationPath` annotation, as shown below.
+
+[source,java]
+----
+package org.acme.rest;
+
+import javax.ws.rs.ApplicationPath;
+import javax.ws.rs.core.Application;
+
+@ApplicationPath("/api")
+public class MyApplication extends Application {
+
+}
+----
+
+This will cause all rest endpoints to be resolved relative to `/api`, so the endpoint above with `@Path("rest")` would
+be accessible under `/api/rest/`. You can also set the `quarkus.rest.path` build time property to set the root path if you
+don't want to use an annotation.
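+
+To make the way these prefixes combine a bit more concrete, here is a small illustrative sketch (the class name `PrefixedEndpoint` and the `/api` prefix are just example values, not something the guide requires); assuming an application path of `/api`, the comments show the resulting URIs:
+
+[source,java]
+----
+package org.acme.rest;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+// With @ApplicationPath("/api") (or quarkus.rest.path=/api):
+//   GET /api/rest        -> list()
+//   GET /api/rest/hello  -> hello()
+@Path("rest")
+public class PrefixedEndpoint {
+
+    @GET
+    public String list() {
+        return "the rest root";
+    }
+
+    @Path("hello")
+    @GET
+    public String hello() {
+        return "Hello, World!";
+    }
+}
+----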
+ +=== Declaring endpoints: HTTP methods + +Each endpoint method must be annotated with one of the following annotations, which defines which HTTP +method will be mapped to the method: + +.Table HTTP method annotations +|=== +|Annotation|Usage + +|link:{jaxrsapi}/javax/ws/rs/GET.html[`@GET`] +|Obtain a resource representation, should not modify state, link:{httpspec}#section-4.2.2[idempotent] (link:{httpspec}#section-4.3.1[HTTP docs]) + +|link:{jaxrsapi}/javax/ws/rs/HEAD.html[`@HEAD`] +|Obtain metadata about a resource, similar to `GET` with no body (link:{httpspec}#section-4.3.2[HTTP docs]) + +|link:{jaxrsapi}/javax/ws/rs/POST.html[`@POST`] +|Create a resource and obtain a link to it (link:{httpspec}#section-4.3.3[HTTP docs]) + +|link:{jaxrsapi}/javax/ws/rs/PUT.html[`@PUT`] +|Replace a resource or create one, should be link:{httpspec}#section-4.2.2[idempotent] (link:{httpspec}#section-4.3.4[HTTP docs]) + +|link:{jaxrsapi}/javax/ws/rs/DELETE.html[`@DELETE`] +|Delete an existing resource, link:{httpspec}#section-4.2.2[idempotent] (link:{httpspec}#section-4.3.5[HTTP docs]) + +|link:{jaxrsapi}/javax/ws/rs/OPTIONS.html[`@OPTIONS`] +|Obtain information about a resource, link:{httpspec}#section-4.2.2[idempotent] (link:{httpspec}#section-4.3.7[HTTP docs]) + +|link:{jaxrsapi}/javax/ws/rs/PATCH.html[`@PATCH`] +|Update a resource, or create one, not link:{httpspec}#section-4.2.2[idempotent] (https://tools.ietf.org/html/rfc5789#section-2[HTTP docs]) + +|=== + +You can also declare other HTTP methods by declaring them as an annotation with the +link:{jaxrsapi}/javax/ws/rs/HttpMethod.html[`@HttpMethod`] annotation: + +[source,java] +---- +package org.acme.rest; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +import javax.ws.rs.HttpMethod; +import javax.ws.rs.Path; + +@Retention(RetentionPolicy.RUNTIME) +@HttpMethod("FROMAGE") +@interface FROMAGE { +} + +@Path("") +public class Endpoint { + + @FROMAGE + public String hello(){ + return "Hello, Cheese World!"; + } +} +---- + +=== Declaring endpoints: representation / content types + +Each endpoint method may consume or produce specific resource representations, which are indicated by +the HTTP link:{httpspec}#section-3.1.1.5[`Content-Type`] header, which in turn contains +link:{httpspec}#section-3.1.1.1[MIME (Media Type)] values, such as the following: + +- `text/plain` which is the default for any endpoint returning a `String`. +- `text/html` for HTML (such as with link:qute[Qute templating]) +- `application/json` for a <> +- `text/*` which is a sub-type wildcard for any text media type +- `\*/*` which is a wildcard for any media type + +You may annotate your endpoint class with the link:{jaxrsapi}/javax/ws/rs/Produces.html[`@Produces`] +or link:{jaxrsapi}/javax/ws/rs/Consumes.html[`@Consumes`] annotations, which +allow you to specify one or more media types that your endpoint may accept as HTTP request body +or produce as HTTP response body. Those class annotations apply to each method. + +Any method may also be annotated with the link:{jaxrsapi}/javax/ws/rs/Produces.html[`@Produces`] +or link:{jaxrsapi}/javax/ws/rs/Consumes.html[`@Consumes`] annotations, in which +case they override any eventual class annotation. + +The link:{jaxrsapi}/javax/ws/rs/core/MediaType.html[`MediaType`] class has many constants you +can use to point to specific pre-defined media types. + +See <> for more information. 
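+
+As an illustration of how class-level and method-level annotations interact, here is a small sketch (the `GreetingEndpoint` class is hypothetical, and it hand-writes a JSON string so that the example does not depend on a JSON extension):
+
+[source,java]
+----
+package org.acme.rest;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+
+@Path("greetings")
+@Produces(MediaType.TEXT_PLAIN) // applies to every method, unless overridden
+public class GreetingEndpoint {
+
+    @GET
+    public String plainGreeting() {
+        // served as text/plain, per the class-level @Produces
+        return "Hello";
+    }
+
+    @POST
+    @Consumes(MediaType.TEXT_PLAIN)       // the request body must be text/plain
+    @Produces(MediaType.APPLICATION_JSON) // overrides the class-level @Produces
+    public String jsonGreeting(String name) {
+        return "{\"greeting\":\"Hello " + name + "\"}";
+    }
+}
+----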
+ +=== Accessing request parameters + +[[request-parameters]] + +NOTE: don't forget to configure your compiler to generate parameter name information with `-parameters` (javac) +or `` or `` (https://maven.apache.org/plugins/maven-compiler-plugin/compile-mojo.html#parameters[Maven]). + +The following HTTP request elements may be obtained by your endpoint method: + +.Table HTTP request parameter annotations +|=== +|HTTP element|Annotation|Usage + +|[[path-parameter]]Path parameter +|`@RestPath` (or nothing) +|URI template parameter (simplified version of the https://tools.ietf.org/html/rfc6570[URI Template specification]), +see <> for more information. + +|Query parameter +|`@RestQuery` +|The value of an https://tools.ietf.org/html/rfc3986#section-3.4[URI query parameter] + +|Header +|`@RestHeader` +|The value of an https://tools.ietf.org/html/rfc7230#section-3.2[HTTP header] + +|Cookie +|`@RestCookie` +|The value of an https://tools.ietf.org/html/rfc6265#section-4.2[HTTP cookie] + +|Form parameter +|`@RestForm` +|The value of an https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/POST[HTTP URL-encoded FORM] + +|Matrix parameter +|`@RestMatrix` +|The value of an https://tools.ietf.org/html/rfc3986#section-3.3[URI path segment parameter] + +|=== + +For each of those annotations, you may specify the name of the element they refer to, otherwise +they will use the name of the annotated method parameter. + +If a client made the following HTTP call: + +[source,http] +---- +POST /cheeses;variant=goat/tomme?age=matured HTTP/1.1 +Content-Type: application/x-www-form-urlencoded +Cookie: level=hardcore +X-Cheese-Secret-Handshake: fist-bump + +smell=strong +---- + +Then you could obtain all the various parameters with this endpoint method: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.POST; +import javax.ws.rs.Path; + +import org.jboss.resteasy.reactive.RestCookie; +import org.jboss.resteasy.reactive.RestForm; +import org.jboss.resteasy.reactive.RestHeader; +import org.jboss.resteasy.reactive.RestMatrix; +import org.jboss.resteasy.reactive.RestPath; +import org.jboss.resteasy.reactive.RestQuery; + +@Path("/cheeses/{type}") +public class Endpoint { + + @POST + public String allParams(@RestPath String type, + @RestMatrix String variant, + @RestQuery String age, + @RestCookie String level, + @RestHeader("X-Cheese-Secret-Handshake") + String secretHandshake, + @RestForm String smell){ + return type + "/" + variant + "/" + age + "/" + level + "/" + secretHandshake + "/" + smell; + } +} +---- + +NOTE: the `@RestPath` annotation is optional: any parameter whose name matches an existing URI +template variable will be automatically assumed to have `@RestPath`. + +You can also use any of the JAX-RS annotations link:{jaxrsapi}/javax/ws/rs/PathParam.html[`@PathParam`], +link:{jaxrsapi}/javax/ws/rs/QueryParam.html[`@QueryParam`], +link:{jaxrsapi}/javax/ws/rs/HeaderParam.html[`@HeaderParam`], +link:{jaxrsapi}/javax/ws/rs/CookieParam.html[`@CookieParam`], +link:{jaxrsapi}/javax/ws/rs/FormParam.html[`@FormParam`] or +link:{jaxrsapi}/javax/ws/rs/MatrixParam.html[`@MatrixParam`] for this, +but they require you to specify the parameter name. + +See <> for more advanced use-cases. 
+ +=== Declaring URI parameters + +[[uri-parameters]] + +You can declare URI parameters and use regular expressions in your path, so for instance +the following endpoint will serve requests for `/hello/stef/23` and `/hello` but not +`/hello/stef/0x23`: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +@Path("hello") +public class Endpoint { + + @Path("{name}/{age:\\d+}") + @GET + public String personalisedHello(String name, int age){ + return "Hello " + name + " is your age really " + age + "?"; + } + + @GET + public String genericHello(){ + return "Hello stranger"; + } +} +---- + + +=== Accessing the request body + +Any method parameter with no annotation will receive the method body.footnote:[Unless it is a +<> or a <>.], after it has been mapped from +its HTTP representation to the Java type of the parameter. + +The following parameter types will be supported out of the box: + +[[resource-types]] + +.Table Request body parameter type +|=== +|Type|Usage + +|link:{jdkapi}/java/io/File.html[`File`] +|The entire request body in a temporary file + +|`byte[]` +|The entire request body, not decoded + +|`char[]` +|The entire request body, decoded + +|link:{jdkapi}/java/lang/String.html[`String`] +|The entire request body, decoded + +|link:{jdkapi}/java/io/InputStream.html[`InputStream`] +|The request body in a blocking stream + +|link:{jdkapi}/java/io/Reader.html[`Reader`] +|The request body in a blocking stream + +|All Java primitives and their wrapper classes +|Java primitive types + +|link:{jdkapi}/java/math/BigDecimal.html[`BigDecimal`], link:{jdkapi}/java/math/BigInteger.html[`BigInteger`] +|Large integers and decimals. + +|link:{jsonpapi}/javax/json/JsonArray.html[`JsonArray`], link:{jsonpapi}/javax/json/JsonArray.html[`JsonObject`], +link:{jsonpapi}/javax/json/JsonArray.html[`JsonStructure`], link:{jsonpapi}/javax/json/JsonArray.html[`JsonValue`] +|JSON value types + +|link:{vertxapi}io/vertx/core/buffer/Buffer.html[`Buffer`] +|Vert.x Buffer + +|any other type +|Will be <> + +|=== + +NOTE: You can add support for more <>. + +=== Handling Multipart Form data + +To handle HTTP requests that have `multipart/form-data` as their content type, RESTEasy Reactive introduces the `@org.jboss.resteasy.reactive.MultipartForm` annotation. +Let us look at an example of its use. + +Assuming an HTTP request containing a file upload and a form value containing a string description need to be handled, we could write a POJO +that will hold this information like so: + +[source,java] +---- +import java.util.Map; + +import javax.ws.rs.core.MediaType; + +import org.jboss.resteasy.reactive.PartType; +import org.jboss.resteasy.reactive.RestForm; +import org.jboss.resteasy.reactive.multipart.FileUpload; + +public class FormData { + + @RestForm + @PartType(MediaType.TEXT_PLAIN) + public String description; + + @RestForm("image") + public FileUpload file; +} +---- + +The `name` field will contain the data contained in the part of HTTP request called `description` (because `@RestForm` does not define a value, the field name is used), +while the `file` field will contain data about the uploaded file in the `image` part of HTTP request. + +NOTE: `FileUpload` provides access to various metadata of the uploaded file. If however all you need is a handle to the uploaded file, `java.nio.file.Path` or `java.io.File` could be used. + +NOTE: `@PartType` is used to aid in deserialization of the corresponding part of the request into the desired Java type. 
It is very useful when, for example, the corresponding body part is JSON
+and needs to be converted to a POJO.
+
+This POJO could be used in a Resource method like so:
+
+[source,java]
+----
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+
+import org.jboss.resteasy.reactive.MultipartForm;
+
+@Path("multipart")
+public class Endpoint {
+
+    @POST
+    @Produces(MediaType.APPLICATION_JSON)
+    @Consumes(MediaType.MULTIPART_FORM_DATA)
+    @Path("form")
+    public String form(@MultipartForm FormData formData) {
+        // use the uploaded file, then return something, for example the description part
+        return formData.description;
+    }
+}
+----
+
+The use of `@MultipartForm` as a method parameter makes RESTEasy Reactive handle the request as a multipart form request.
+
+WARNING: When handling file uploads, it is very important to move the file to permanent storage (like a database, a dedicated file system or cloud storage) in your code that handles the POJO.
+Otherwise, the file will no longer be accessible when the request terminates.
+Moreover, if `quarkus.http.body.delete-uploaded-files-on-end` is set to true, Quarkus will delete the uploaded file when the HTTP response is sent. If the setting is disabled,
+the file will reside on the file system of the server (in the directory defined by the `quarkus.http.body.uploads-directory` configuration option), but as the uploaded files are saved
+with a UUID file name and no additional metadata is saved, these files are essentially a random dump of files.
+
+
+=== Returning a response body
+
+In order to return an HTTP response, simply return the resource you want from your method. The method
+return type and its optional content type will be used to decide how to serialise it to the HTTP
+response (see <<readers-writers>> for more advanced information).
+
+You can return any of the pre-defined types that you can read from the <<resource-types,request body>>,
+and any other type will be mapped via <<json,JSON>>.
+
+Alternatively, you can also return a <<reactive,reactive type>> such as link:{mutinyapi}/io/smallrye/mutiny/Uni.html[`Uni`],
+link:{mutinyapi}/io/smallrye/mutiny/Multi.html[`Multi`] or
+link:{jdkapi}/java/util/concurrent/CompletionStage.html[`CompletionStage`]
+that resolves to one of the mentioned return types.
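+
+As a minimal sketch of a few of these return types (the `BodyEndpoint` class and its sub-paths are purely illustrative):
+
+[source,java]
+----
+package org.acme.rest;
+
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionStage;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+@Path("bodies")
+public class BodyEndpoint {
+
+    @GET
+    @Path("text")
+    public String text() {
+        // written as-is to the response body, text/plain by default
+        return "some text";
+    }
+
+    @GET
+    @Path("bytes")
+    public byte[] bytes() {
+        // written to the response body without any decoding
+        return new byte[] { 1, 2, 3 };
+    }
+
+    @GET
+    @Path("async-text")
+    public CompletionStage<String> asyncText() {
+        // the request is suspended until the CompletionStage resolves
+        return CompletableFuture.completedFuture("some text, resolved later");
+    }
+}
+----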
+ +=== Setting other response properties + +If you need to set more properties on the HTTP response than just the body, such as the status code +or headers, you can make your method return the link:{jaxrsapi}/javax/ws/rs/core/Response.html[`Response`] +type: + +[source,java] +---- +package org.acme.rest; + +import java.time.Duration; +import java.time.Instant; +import java.util.Date; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.NewCookie; +import javax.ws.rs.core.Response; + +@Path("") +public class Endpoint { + + @GET + public Response hello() { + // HTTP OK status with text/plain content type + return Response.ok("Hello, World!", MediaType.TEXT_PLAIN_TYPE) + // set a response header + .header("X-FroMage", "Camembert") + // set the Expires response header to two days from now + .expires(Date.from(Instant.now().plus(Duration.ofDays(2)))) + // send a new cookie + .cookie(new NewCookie("Flavour", "praliné")) + // end of builder API + .build(); + } +} +---- + +=== Async/reactive support + +[[reactive]] + +If your endpoint method needs to accomplish an asynchronous or reactive task before +being able to answer, you can declare your method to return the +link:{mutinyapi}/io/smallrye/mutiny/Uni.html[`Uni`] type (from https://smallrye.io/smallrye-mutiny/[Mutiny]), in which +case the current HTTP request will be automatically suspended after your method, until +the returned link:{mutinyapi}/io/smallrye/mutiny/Uni.html[`Uni`] instance resolves to a value, +which will be mapped to a response exactly according to the previously described rules: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import io.smallrye.mutiny.Uni; + +@Path("escoffier") +public class Endpoint { + + @GET + public Uni culinaryGuide(){ + return Book.findByIsbn("978-2081229297"); + } +} +---- + +This allows you to not block the event-loop thread while the book is being fetched from the +database, and allows Quarkus to serve more requests until your book is ready to +be sent to the client and terminate this request. Check out our +<> for more information. + +The link:{jdkapi}/java/util/concurrent/CompletionStage.html[`CompletionStage`] return +type is also supported. + +=== Streaming support + +If you want to stream your response element by element, you can make your endpoint method return a +link:{mutinyapi}/io/smallrye/mutiny/Multi.html[`Multi`] type (from https://smallrye.io/smallrye-mutiny/[Mutiny]). +This is especially useful for streaming text or binary data. + +This example, using link:reactive-messaging-http[Reactive Messaging] shows how to stream +text data: + +[source,java] +---- +package org.acme.rest; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import org.eclipse.microprofile.reactive.messaging.Channel; + +import io.smallrye.mutiny.Multi; + +@Path("logs") +public class Endpoint { + + @Inject + @Channel("log-out") + Multi logs; + + @GET + public Multi streamLogs() { + return logs; + } +} +---- + +=== Server-Sent Event (SSE) support + +If you want to stream JSON objects in your response, you can use +https://html.spec.whatwg.org/multipage/server-sent-events.html[Server-Sent Events] +by just annotating your endpoint method with +link:{jaxrsapi}/javax/ws/rs/Produces.html[`@Produces(MediaType.SERVER_SENT_EVENTS)`] +and specifying that each element should be <> with +`@RestSseElementType(MediaType.APPLICATION_JSON)`. 
+ +[source,java] +---- +package org.acme.rest; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import org.jboss.resteasy.reactive.RestSseElementType; + +import io.smallrye.mutiny.Multi; +import io.smallrye.mutiny.Uni; + +import io.smallrye.reactive.messaging.annotations.Channel; + +@Path("escoffier") +public class Endpoint { + + // Inject our Book channel + @Inject + @Channel("book-out") + Multi books; + + @GET + // Send the stream over SSE + @Produces(MediaType.SERVER_SENT_EVENTS) + // Each element will be sent as JSON + @RestSseElementType(MediaType.APPLICATION_JSON) + public Multi stream() { + return books; + } +} +---- + +=== Accessing context objects + +[[context-objects]] + +There are a number of contextual objects that the framework will give you, if your endpoint +method takes parameters of the following type: + +.Table Context object +|=== +|Type|Usage + +|link:{jaxrsapi}/javax/ws/rs/core/HttpHeaders.html[`HttpHeaders`] +|All the request headers + +|link:{jaxrsapi}/javax/ws/rs/container/ResourceInfo.html[`ResourceInfo`] +|Information about the current endpoint method and class (requires reflection) + +|link:{jaxrsapi}/javax/ws/rs/core/SecurityContext.html[`SecurityContext`] +|Access to the current user and roles + +|`SimpleResourceInfo` +|Information about the current endpoint method and class (no reflection required) + +|link:{jaxrsapi}/javax/ws/rs/core/UriInfo.html[`UriInfo`] +|Provides information about the current endpoint and application URI + +|link:{jaxrsapi}/javax/ws/rs/core/Application.html[`Application`] +|Advanced: Current JAX-RS application class + +|link:{jaxrsapi}/javax/ws/rs/core/Configuration.html[`Configuration`] +|Advanced: Configuration about the deployed JAX-RS application + +|link:{jaxrsapi}/javax/ws/rs/ext/Providers.html[`Providers`] +|Advanced: Runtime access to JAX-RS providers + +|link:{jaxrsapi}/javax/ws/rs/core/Request.html[`Request`] +|Advanced: Access to the current HTTP method and <> + +|link:{jaxrsapi}/javax/ws/rs/core/ResourceContext.html[`ResourceContext`] +|Advanced: access to instances of endpoints + +|`ServerRequestContext` +|Advanced: RESTEasy Reactive access to the current request/response + +|link:{jaxrsapi}/javax/ws/rs/sse/Sse.html[`Sse`] +|Advanced: Complex SSE use-cases + +|=== + +For example, here is how you can return the name of the currently logged-in user: + +[source,java] +---- +package org.acme.rest; + +import java.security.Principal; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.core.SecurityContext; + +@Path("user") +public class Endpoint { + + @GET + public String userName(SecurityContext security) { + Principal user = security.getUserPrincipal(); + return user != null ? user.getName() : ""; + } +} +---- + +You can also inject those context objects using +https://javadoc.io/static/javax.inject/javax.inject/1/javax/inject/Inject.html[`@Inject`] on fields of the same +type: + +[source,java] +---- +package org.acme.rest; + +import java.security.Principal; + +import javax.inject.Inject; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.core.SecurityContext; + +@Path("user") +public class Endpoint { + + @Inject + SecurityContext security; + + @GET + public String userName() { + Principal user = security.getUserPrincipal(); + return user != null ? 
user.getName() : ""; + } +} +---- + +Or even on your endpoint constructor: + +[source,java] +---- +package org.acme.rest; + +import java.security.Principal; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.core.SecurityContext; + +@Path("user") +public class Endpoint { + + SecurityContext security; + + Endpoint(SecurityContext security){ + this.security = security; + } + + @GET + public String userName() { + Principal user = security.getUserPrincipal(); + return user != null ? user.getName() : ""; + } +} +---- + + +=== JSON serialisation + +[[json]] + +Instead of importing `io.quarkus:quarkus-resteasy-reactive`, you can import either of the following modules to get support for JSON: + +.Table Context object +|=== +|GAV|Usage + +|`io.quarkus:quarkus-resteasy-reactive-jackson` +|https://github.com/FasterXML/jackson[Jackson support] + +|`io.quarkus:quarkus-resteasy-reactive-jsonb` +|http://json-b.net/[JSONB support] + +|=== + +In both cases, importing those modules will allow HTTP message bodies to be read from JSON +and serialised to JSON, for <>. + +==== Advanced Jackson-specific features + +When using the `quarkus-resteasy-reactive-jackson` extension there are some advanced features that RESTEasy Reactive supports. + +===== @JsonView support + +JAX-RS methods can be annotated with https://fasterxml.github.io/jackson-annotations/javadoc/2.10/com/fasterxml/jackson/annotation/JsonView.html[@JsonView] +in order to customize the serialization of the returned POJO, on a per method-basis. This is best explained with an example. + +A typical use of `@JsonView` is to hide certain fields on certain methods. In that vein, let's define two views: + +[source,java] +---- +public class Views { + + public static class Public { + } + + public static class Private extends Public { + } +} +---- + +Let's assume we have the `User` POJO on which we want to hide some field during serialization. A simple example of this is: + +[source,java] +---- +public class User { + + @JsonView(Views.Private.class) + public int id; + + @JsonView(Views.Public.class) + public String name; +} +---- + +Depending on the JAX-RS method that returns this user, we might want to exclude the `id` field from serialization - for example you might want an insecure method +to not expose this field. The way we can achieve that in RESTEasy Reactive is shown in the following example: + +[source,java] +---- +@JsonView(Views.Public.class) +@GET +@Path("/public") +public User userPublic() { + return testUser(); +} + +@JsonView(Views.Private.class) +@GET +@Path("/private") +public User userPrivate() { + return testUser(); +} +---- + +When the result the `userPublic` method is serialized, the `id` field will not be contained in the response as the `Public` view does not include it. +The result of `userPrivate` however will include the `id` as expected when serialized. + +===== Completely customized per method serialization + +There are times when you need to completely customize the serialization of a POJO on a per JAX-RS method basis. For such use cases, the `@io.quarkus.resteasy.reactive.jackson.CustomSerialization` annotation +is a great tool, as it allows you to configure a per-method `com.fasterxml.jackson.databind.ObjectWriter` which can be configured at will. 
+ +Here is an example use case: + +[source,java] +---- +@CustomSerialization(UnquotedFields.class) +@GET +@Path("/invalid-use-of-custom-serializer") +public User invalidUseOfCustomSerializer() { + return testUser(); +} +---- + +where `UnquotedFields` is a `BiFunction` defined as so: + +[source,java] +---- +public static class UnquotedFields implements BiFunction { + + @Override + public ObjectWriter apply(ObjectMapper objectMapper, Type type) { + return objectMapper.writer().without(JsonWriteFeature.QUOTE_FIELD_NAMES); + } +} +---- + +Essentially what this class does is force Jackson to not include quotes in the field names. + +It is important to note that this customization is only performed for the serialization of the JAX-RS methods that use `@CustomSerialization(UnquotedFields.class)`. + +== More advanced usage + +Here are some more advanced topics that you may not need to know about initially, but +could prove useful for more complex use cases. + +=== Execution model, blocking, non-blocking + +[[execution-model]] + +RESTEasy Reactive is implemented using two main thread types: + +- Event-loop threads: which are responsible, among other things, for reading bytes from the HTTP request and + writing bytes back to the HTTP response +- Worker threads: they are pooled and can be used to offload long-running operations + +The event-loop threads (also called IO threads) are responsible for actually performing all the IO +operations in an asynchronous way, and to trigger any listener interested in the completion of those +IO operations. + +By default, RESTEasy Reactive will run endpoint methods on the event-loop threads, on the assumption that +they are going to be fast and only invoke non-blocking operations. + +This is the model of execution that leads to best performance if your endpoints do not do any blocking +operation (such as blocking IO, blocking on an asynchronous operation, or sleeping). + +If your endpoint method needs to do any of those blocking operations, you should add the +https://javadoc.io/doc/io.smallrye.common/smallrye-common-annotation/1.5.0/io/smallrye/common/annotation/Blocking.html[`@Blocking`] +annotation on your endpoint and it will instead be invoked on a worker thread. 
Your endpoint method +code can remain exactly the same, and it will be allowed to block: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import io.smallrye.common.annotation.Blocking; + +@Path("yawn") +public class Endpoint { + + @Blocking + @GET + public String blockingHello() throws InterruptedException { + // do a blocking operation + Thread.sleep(1000); + return "Yaaaawwwwnnnnnn…"; + } +} +---- + +Most of the time, there are ways to achieve the same blocking operations in an asynchronous/reactive +way, using https://smallrye.io/smallrye-mutiny/[Mutiny], http://hibernate.org/reactive/[Hibernate Reactive] +or any of the link:getting-started-reactive[Quarkus Reactive extensions] for example: + +[source,java] +---- +package org.acme.rest; + +import java.time.Duration; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import io.smallrye.mutiny.Uni; + +@Path("yawn") +public class Endpoint { + + @GET + public Uni blockingHello() throws InterruptedException { + return Uni.createFrom().item("Yaaaawwwwnnnnnn…") + // do a non-blocking sleep + .onItem().delayIt().by(Duration.ofSeconds(2)); + } +} +---- + +=== Exception mapping + +If your application needs to return non-nominal HTTP codes in error cases, the best is +to throw exceptions that will result in the proper HTTP response being sent by the +framework using link:{jaxrsapi}/javax/ws/rs/WebApplicationException.html[`WebApplicationException`] or any of its subtypes: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.BadRequestException; +import javax.ws.rs.GET; +import javax.ws.rs.NotFoundException; +import javax.ws.rs.Path; + +@Path("fromages/{fromage}") +public class Endpoint { + + @GET + public String findFromage(String fromage) { + if(fromage == null) + // send a 400 + throw new BadRequestException(); + if(!fromage.equals("camembert")) + // send a 404 + throw new NotFoundException("Unknown cheese: " + fromage); + return "Camembert is a very nice cheese"; + } +} +---- + +If your endpoint method is delegating calls to another service layer which +does not know of JAX-RS, you need a way to turn service exceptions to an +HTTP response, and you can do that using the `@ServerExceptionMapper` annotation +on a method, with one parameter of the exception type you want to handle, and turning +that exception into a link:{jaxrsapi}/javax/ws/rs/core/Response.html[`Response`] (or a +link:{mutinyapi}/io/smallrye/mutiny/Uni.html[`Uni`]): + +[source,java] +---- +package org.acme.rest; + +import java.util.Map; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import javax.ws.rs.BadRequestException; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.core.Response; + +import org.jboss.resteasy.reactive.server.ServerExceptionMapper; + +class UnknownCheeseException extends RuntimeException { + public final String name; + + public UnknownCheeseException(String name) { + this.name = name; + } +} + +@ApplicationScoped +class CheeseService { + private static final Map cheeses = + Map.of("camembert", "Camembert is a very nice cheese", + "gouda", "Gouda is acceptable too, especially with cumin"); + + public String findCheese(String name) { + String ret = cheeses.get(name); + if(ret != null) + return ret; + throw new UnknownCheeseException(name); + } +} + +@Path("fromages/{fromage}") +public class Endpoint { + + @Inject + CheeseService cheeses; + + @ServerExceptionMapper + public Response mapException(UnknownCheeseException x) { + return 
Response.status(Response.Status.NOT_FOUND) + .entity("Unknown cheese: " + x.name) + .build(); + } + + @GET + public String findFromage(String fromage) { + if(fromage == null) + // send a 400 + throw new BadRequestException(); + return cheeses.findCheese(fromage); + } +} +---- + +NOTE: exception mappers defined in REST endpoint classes will only be called if the +exception is thrown in the same class. If you want to define global exception mappers, +simply define them outside a REST endpoint class: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.core.Response; + +import org.jboss.resteasy.reactive.server.ServerExceptionMapper; + +class ExceptionMappers { + @ServerExceptionMapper + public Response mapException(UnknownCheeseException x) { + return Response.status(Response.Status.NOT_FOUND) + .entity("Unknown cheese: " + x.name) + .build(); + } +} +---- + +You can also declare link:{jaxrsspec}#exceptionmapper[exception mappers in the JAX-RS way]. + +Your exception mapper may declare any of the following parameter types: + +.Table Exception mapper parameters +|=== +|Type|Usage + +|An exception type +|Defines the exception type you want to handle + +|Any of the <> +| + +|link:{jaxrsapi}/javax/ws/rs/container/ContainerRequestContext.html[`ContainerRequestContext`] +|A context object to access the current request + +|=== + +It may declare any of the following return types: + +.Table Exception mapper return types +|=== +|Type|Usage + +|link:{jaxrsapi}/javax/ws/rs/core/Response.html[`Response`] +|The response to send to the client when the exception occurs + +|link:{mutinyapi}/io/smallrye/mutiny/Uni.html[`Uni`] +|An asynchronous response to send to the client when the exception occurs + +|=== + +=== Request or response filters + +You can declare functions which are invoked in the following phases of the request processing: + +- Before the endpoint method is identified: pre-routing request filter +- After routing, but before the endpoint method is called: normal request filter +- After the endpoint method is called: response filter + +These filters allow you to do various things such as examine the request URI, +HTTP method, influence routing, look or change request headers, abort the request, +or modify the response. + +Request filters can be declared with the `@ServerRequestFilter` annotation: + +[source,java] +---- +class Filters { + + @ServerRequestFilter(preMatching = true) + public void preMatchingFilter(ContainerRequestContext requestContext) { + // make sure we don't lose cheese lovers + if("yes".equals(requestContext.getHeaderString("Cheese"))) { + requestContext.setRequestUri(URI.create("/cheese")); + } + } + + @ServerRequestFilter + public void getFilter(ContainerRequestContext ctx) { + // only allow GET methods for now + if(ctx.getMethod().equals(HttpMethod.GET)) { + ctx.abortWith(Response.status(Response.Status.METHOD_NOT_ALLOWED).build()); + } + } +} +---- + +Similarly, response filters can be declared with the `@ServerResponseFilter` annotation: + +[source,java] +---- +class Filters { + @ServerResponseFilter + public void getFilter(ContainerResponseContext responseContext) { + Object entity = responseContext.getEntity(); + if(entity instanceof String) { + // make it shout + responseContext.setEntity(((String)entity).toUpperCase()); + } + } +} +---- + +You can also link:{jaxrsspec}#filters[declare request and response filters in the JAX-RS way]. 
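+
+For reference, the JAX-RS way is a `@Provider`-annotated class implementing `ContainerRequestFilter` (or `ContainerResponseFilter`); the `CheeseHeaderFilter` below is an illustrative sketch, not part of the guide's example code:
+
+[source,java]
+----
+package org.acme.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+
+@Provider
+public class CheeseHeaderFilter implements ContainerRequestFilter {
+
+    @Override
+    public void filter(ContainerRequestContext requestContext) throws IOException {
+        // abort requests that openly dislike cheese
+        if ("no".equals(requestContext.getHeaderString("Cheese"))) {
+            requestContext.abortWith(
+                    Response.status(Response.Status.FORBIDDEN).build());
+        }
+    }
+}
+----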
+
+Filters declared with `@ServerRequestFilter` or `@ServerResponseFilter` may declare any of the following parameter types:
+
+.Table Filter parameters
+|===
+|Type|Usage
+
+|Any of the <<context-objects,context objects>>
+|
+
+|link:{jaxrsapi}/javax/ws/rs/container/ContainerRequestContext.html[`ContainerRequestContext`]
+|A context object to access the current request
+
+|link:{jaxrsapi}/javax/ws/rs/container/ContainerResponseContext.html[`ContainerResponseContext`]
+|A context object to access the current response
+
+|link:{jdkapi}/java/lang/Throwable.html[`Throwable`]
+|Any thrown exception, or `null` (only for response filters)
+
+|===
+
+They may declare any of the following return types:
+
+.Table Filter return types
+|===
+|Type|Usage
+
+|link:{jaxrsapi}/javax/ws/rs/core/Response.html[`Response`]
+|The response to send to the client instead of continuing the filter chain, or `null` if the filter chain should proceed
+
+|link:{jdkapi}/java/util/Optional.html[`Optional<Response>`]
+|An optional response to send to the client instead of continuing the filter chain, or an empty value if the filter chain should proceed
+
+|link:{mutinyapi}/io/smallrye/mutiny/Uni.html[`Uni<Response>`]
+|An asynchronous response to send to the client instead of continuing the filter chain, or `null` if the filter chain should proceed
+
+|===
+
+NOTE: You can restrict the Resource methods for which a filter runs, by using link:{jaxrsapi}/javax/ws/rs/NameBinding.html[`@NameBinding`] meta-annotations.
+
+=== Readers and Writers: mapping entities and HTTP bodies
+
+[[readers-writers]]
+
+Whenever your endpoint methods return an object (or when they return a
+link:{jaxrsapi}/javax/ws/rs/core/Response.html[`Response`] with
+an entity), RESTEasy Reactive will look for a way to map that into an HTTP response body.
+
+Similarly, whenever your endpoint method takes an object as a parameter, we will look for
+a way to map the HTTP request body into that object.
+
+This is done via a pluggable system of link:{jaxrsapi}/javax/ws/rs/ext/MessageBodyReader.html[`MessageBodyReader`]
+and link:{jaxrsapi}/javax/ws/rs/ext/MessageBodyWriter.html[`MessageBodyWriter`] interfaces,
+which are responsible for defining which Java type they map from/to, for which media types,
+and how they turn HTTP bodies to/from Java instances of that type.
+ +For example, if we have our own `FroMage` type on our endpoint: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.GET; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; + +class FroMage { + public String name; + + public FroMage(String name) { + this.name = name; + } +} + +@Path("cheese") +public class Endpoint { + + @GET + public FroMage sayCheese() { + return new FroMage("Cheeeeeese"); + } + + @PUT + public void addCheese(FroMage fromage) { + System.err.println("Received a new cheese: " + fromage.name); + } +} +---- + +Then we can define how to read and write it with our body reader/writers, annotated +with link:{jaxrsapi}/javax/ws/rs/ext/Provider.html[`@Provider`]: + +[source,java] +---- +package org.acme.rest; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; +import java.nio.charset.StandardCharsets; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.ext.MessageBodyReader; +import javax.ws.rs.ext.MessageBodyWriter; +import javax.ws.rs.ext.Provider; + +@Provider +public class FroMageBodyHandler implements MessageBodyReader, + MessageBodyWriter { + + @Override + public boolean isWriteable(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType) { + return type == FroMage.class; + } + + @Override + public void writeTo(FroMage t, Class type, Type genericType, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap httpHeaders, + OutputStream entityStream) + throws IOException, WebApplicationException { + entityStream.write(("[FroMageV1]" + t.name) + .getBytes(StandardCharsets.UTF_8)); + } + + @Override + public boolean isReadable(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType) { + return type == FroMage.class; + } + + @Override + public FroMage readFrom(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap httpHeaders, + InputStream entityStream) + throws IOException, WebApplicationException { + String body = new String(entityStream.readAllBytes(), StandardCharsets.UTF_8); + if(body.startsWith("[FroMageV1]")) + return new FroMage(body.substring(11)); + throw new IOException("Invalid fromage: " + body); + } + +} +---- + +If you want to get the most performance our of your writer, you can extend `ServerMessageBodyWriter` instead +of link:{jaxrsapi}/javax/ws/rs/ext/MessageBodyWriter.html[`MessageBodyWriter`] +where you will be able to use less reflection and bypass the blocking IO layer: + +[source,java] +---- +package org.acme.rest; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; +import java.nio.charset.StandardCharsets; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.ext.MessageBodyReader; +import javax.ws.rs.ext.Provider; + +import org.jboss.resteasy.reactive.server.spi.ResteasyReactiveResourceInfo; +import org.jboss.resteasy.reactive.server.spi.ServerMessageBodyWriter; +import org.jboss.resteasy.reactive.server.spi.ServerRequestContext; + +@Provider +public class FroMageBodyHandler implements MessageBodyReader, + ServerMessageBodyWriter { + + // … + + @Override + public boolean isWriteable(Class type, ResteasyReactiveResourceInfo target, + MediaType 
mediaType) { + return type == FroMage.class; + } + + @Override + public void writeResponse(FroMage t, ServerRequestContext context) + throws WebApplicationException, IOException { + context.serverResponse().end("[FroMageV1]" + t.name); + } +} +---- + +NOTE: You can restrict which content-types your reader/writer apply to by adding +link:{jaxrsapi}/javax/ws/rs/Consumes.html[`Consumes`]/link:{jaxrsapi}/javax/ws/rs/Produces.html[`Produces`] annotations +on your provider class. + +=== Reader and Writer interceptors + +Just as you can intercept requests and responses, you can also intercept readers and writers, by +extending the link:{jaxrsapi}/javax/ws/rs/ext/ReaderInterceptor.html[`ReaderInterceptor`] or +link:{jaxrsapi}/javax/ws/rs/ext/WriterInterceptor.html[`WriterInterceptor`] on a class annotated with +link:{jaxrsapi}/javax/ws/rs/ext/Provider.html[`@Provider`]. + +If we look at this endpoint: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.GET; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; + +@Path("cheese") +public class Endpoint { + + @GET + public String sayCheese() { + return "Cheeeeeese"; + } + + @PUT + public void addCheese(String fromage) { + System.err.println("Received a new cheese: " + fromage); + } +} +---- + +We can add reader and writer interceptors like this: + +[source,java] +---- +package org.acme.rest; + +import java.io.IOException; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.ext.Provider; +import javax.ws.rs.ext.ReaderInterceptor; +import javax.ws.rs.ext.ReaderInterceptorContext; +import javax.ws.rs.ext.WriterInterceptor; +import javax.ws.rs.ext.WriterInterceptorContext; + +@Provider +public class FroMageIOInterceptor implements ReaderInterceptor, WriterInterceptor { + + @Override + public void aroundWriteTo(WriterInterceptorContext context) + throws IOException, WebApplicationException { + System.err.println("Before writing " + context.getEntity()); + context.proceed(); + System.err.println("After writing " + context.getEntity()); + } + + @Override + public Object aroundReadFrom(ReaderInterceptorContext context) + throws IOException, WebApplicationException { + System.err.println("Before reading " + context.getGenericType()); + Object entity = context.proceed(); + System.err.println("After reading " + entity); + return entity; + } +} +---- + +=== Parameter mapping + +All <> can be declared as link:{jdkapi}/java/lang/String.html[`String`], but also +any of the following types: + +- Types for which a link:{jaxrsapi}/javax/ws/rs/ext/ParamConverter.html[`ParamConverter`] is available via a registered +link:{jaxrsapi}/javax/ws/rs/ext/ParamConverterProvider.html[`ParamConverterProvider`]. +- Primitive types. +- Types that have a constructor that accepts a single link:{jdkapi}/java/lang/String.html[`String`] argument. +- Types that have a static method named `valueOf` or `fromString` with a single link:{jdkapi}/java/lang/String.html[`String`] argument +that return an instance of the type. If both methods are present then `valueOf` will be used unless +the type is an `enum` in which case `fromString` will be used. +- link:{jdkapi}/java/util/List.html[`List`], link:{jdkapi}/java/util/Set.html[`Set`], or +link:{jdkapi}/java/util/SortedSet.html[`SortedSet`], where `T` satisfies any above criterion. 
+ +The following example illustrates all those possibilities: + +[source,java] +---- +package org.acme.rest; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; +import java.util.List; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.ext.ParamConverter; +import javax.ws.rs.ext.ParamConverterProvider; +import javax.ws.rs.ext.Provider; + +import org.jboss.resteasy.reactive.RestQuery; + +@Provider +class MyConverterProvider implements ParamConverterProvider { + + @Override + public ParamConverter getConverter(Class rawType, Type genericType, + Annotation[] annotations) { + // declare a converter for this type + if(rawType == Converter.class) { + return (ParamConverter) new MyConverter(); + } + return null; + } + +} + +// this is my custom converter +class MyConverter implements ParamConverter { + + @Override + public Converter fromString(String value) { + return new Converter(value); + } + + @Override + public String toString(Converter value) { + return value.value; + } + +} + +// this uses a converter +class Converter { + String value; + Converter(String value) { + this.value = value; + } +} + +class Constructor { + String value; + // this will use the constructor + public Constructor(String value) { + this.value = value; + } +} + +class ValueOf { + String value; + private ValueOf(String value) { + this.value = value; + } + // this will use the valueOf method + public static ValueOf valueOf(String value) { + return new ValueOf(value); + } +} + +@Path("hello") +public class Endpoint { + + @Path("{converter}/{constructor}/{primitive}/{valueOf}") + @GET + public String convertions(Converter converter, Constructor constructor, + int primitive, ValueOf valueOf, + @RestQuery List list){ + return converter + "/" + constructor + "/" + primitive + + "/" + valueOf + "/" + list; + } +} +---- + +=== Preconditions + +https://tools.ietf.org/html/rfc7232[HTTP allows requests to be conditional], based on a number of +conditions, such as: + +- Date of last resource modification +- A resource tag, similar to a hash code of the resource to designate its state or version + +Let's see how you can do conditional request validation using the +link:{jaxrsapi}/javax/ws/rs/core/Request.html[`Request`] context object: + +[source,java] +---- +package org.acme.rest; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.time.temporal.TemporalUnit; +import java.util.Date; + +import javax.ws.rs.GET; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.core.EntityTag; +import javax.ws.rs.core.Request; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.ResponseBuilder; + +@Path("conditional") +public class Endpoint { + + // It's important to keep our date on seconds because that's how it's sent to the + // user in the Last-Modified header + private Date date = Date.from(Instant.now().truncatedTo(ChronoUnit.SECONDS)); + private int version = 1; + private EntityTag tag = new EntityTag("v1"); + private String resource = "Some resource"; + + @GET + public Response get(Request request){ + // first evaluate preconditions + ResponseBuilder conditionalResponse = request.evaluatePreconditions(date, tag); + if(conditionalResponse != null) + return conditionalResponse.build(); + // preconditions are OK + return Response.ok(resource) + .lastModified(date) + .tag(tag) + .build(); + } + + @PUT + public Response put(Request request, String body){ + // first evaluate preconditions + ResponseBuilder conditionalResponse = 
request.evaluatePreconditions(date, tag); + if(conditionalResponse != null) + return conditionalResponse.build(); + // preconditions are OK, we can update our resource + resource = body; + date = Date.from(Instant.now().truncatedTo(ChronoUnit.SECONDS)); + version++; + tag = new EntityTag("v" + version); + return Response.ok(resource) + .lastModified(date) + .tag(tag) + .build(); + } +} +---- + +When we call `GET /conditional` the first time, we will get this response: + +[source] +---- +HTTP/1.1 200 OK +Content-Type: text/plain;charset=UTF-8 +ETag: "v1" +Last-Modified: Wed, 09 Dec 2020 16:10:19 GMT +Content-Length: 13 + +Some resource +---- + +So now if we want to check if we need to fetch a new version, we can make the following request: + +[source] +---- +GET /conditional HTTP/1.1 +Host: localhost:8080 +If-Modified-Since: Wed, 09 Dec 2020 16:10:19 GMT +---- + +And we would get the following response: + +[source] +---- +HTTP/1.1 304 Not Modified +---- + +Because the resource has not been modified since that date. This saves on sending the resource, +but can also help your users detect concurrent modification, for example, let's suppose that one +client wants to update the resource, but another user has modified it since. You can follow the +previous `GET` request with this update: + +[source] +---- +PUT /conditional HTTP/1.1 +Host: localhost:8080 +If-Unmodified-Since: Wed, 09 Dec 2020 16:25:43 GMT +If-Match: v1 +Content-Length: 8 +Content-Type: text/plain + +newstuff +---- + +And if some other user has modified the resource between your `GET` and your `PUT` you would +get this answer back: + +[source] +---- +HTTP/1.1 412 Precondition Failed +ETag: "v2" +Content-Length: 0 +---- + +=== Negotiation + +One of the main ideas of REST (https://tools.ietf.org/html/rfc7231#section-3.4[and HTTP]) is that +your resource is independent from its representation, and +that both the client and server are free to represent their resources in as many media types as +they want. This allows the server to declare support for multiple representations and let the +client declare which ones it supports and get served something appropriate. 
+ +The following endpoint supports serving cheese in plain text or JSON: + +[source,java] +---- +package org.acme.rest; + +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import com.fasterxml.jackson.annotation.JsonCreator; + +class FroMage { + public String name; + @JsonCreator + public FroMage(String name) { + this.name = name; + } + @Override + public String toString() { + return "Cheese: " + name; + } +} + +@Path("negotiated") +public class Endpoint { + + @Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN}) + @GET + public FroMage get(){ + return new FroMage("Morbier"); + } + + @Consumes(MediaType.TEXT_PLAIN) + @PUT + public FroMage putString(String cheese){ + return new FroMage(cheese); + } + + @Consumes(MediaType.APPLICATION_JSON) + @PUT + public FroMage putJson(FroMage fromage){ + return fromage; + } +} +---- + +The user will be able to select which representation it gets with the +link:{httpspec}#section-5.3.2[`Accept`] header, in the case of JSON: + +[source,sh] +---- +> GET /negotiated HTTP/1.1 +> Host: localhost:8080 +> Accept: application/json + +< HTTP/1.1 200 OK +< Content-Type: application/json +< Content-Length: 18 +< +< {"name":"Morbier"} +---- + +And for text: + +[source,sh] +---- +> GET /negotiated HTTP/1.1 +> Host: localhost:8080 +> Accept: text/plain +> +< HTTP/1.1 200 OK +< Content-Type: text/plain +< Content-Length: 15 +< +< Cheese: Morbier +---- + +Similarly, you can `PUT` two different representations. JSON: + +[source,sh] +---- +> PUT /negotiated HTTP/1.1 +> Host: localhost:8080 +> Content-Type: application/json +> Content-Length: 16 +> +> {"name": "brie"} + +< HTTP/1.1 200 OK +< Content-Type: application/json;charset=UTF-8 +< Content-Length: 15 +< +< {"name":"brie"} +---- + +Or plain text: + +[source,sh] +---- +> PUT /negotiated HTTP/1.1 +> Host: localhost:8080 +> Content-Type: text/plain +> Content-Length: 9 +> +> roquefort + +< HTTP/1.1 200 OK +< Content-Type: application/json;charset=UTF-8 +< Content-Length: 20 +< +< {"name":"roquefort"} +---- + +== Include/Exclude JAX-RS classes with build time conditions + +Quarkus enables the inclusion or exclusion of JAX-RS Resources, Providers and Features directly thanks to build time conditions in the same that it does for CDI beans. +Thus, the various JAX-RS classes can be annotated with profile conditions (`@io.quarkus.arc.profile.IfBuildProfile` or `@io.quarkus.arc.profile.UnlessBuildProfile`) and/or with property conditions (`io.quarkus.arc.properties.IfBuildProperty` or `io.quarkus.arc.properties.UnlessBuildProperty`) to indicate to Quarkus at build time under which conditions these JAX-RS classes should be included. + +In the following example, Quarkus includes the endpoint `sayHello` if and only if the build profile `app1` has been enabled. + +[source,java] +---- +@IfBuildProfile("app1") +public class ResourceForApp1Only { + + @GET + @Path("sayHello") + public String sayHello() { + return "hello"; + } +} +---- + +Please note that if a JAX-RS Application has been detected and the method `getClasses()` and/or `getSingletons()` has/have been overridden, Quarkus will ignore the build time conditions and consider only what has been defined in the JAX-RS Application. + + +== RESTEasy Reactive client +In addition to the Server side, RESTEasy Reactive comes with a new MicroProfile Rest Client implementation that is non-blocking at its core. 
+ +To use it, add the `quarkus-rest-client-reactive` extension to your project. +The client can be used as described in https://quarkus.io/guides/rest-client[Using the REST Client guide] and specified in the https://download.eclipse.org/microprofile/microprofile-rest-client-1.4.1/microprofile-rest-client-1.4.1.html[MicroProfile Rest Client specification], with a few exceptions: + +- the extension name +- the default scope of the client for the new extension is `@ApplicationScoped` while the `quarkus-rest-client` defaults to `@Dependent` +To change this behavior, set the `quarkus.rest.client.scope` property to the fully qualified scope name. +- it is not possible to set `HostnameVerifier` or `SSLContext` +- a few things that don't make sense for a non-blocking implementations, such as setting the `ExecutorService`, don't work + +It is important to note that the `quarkus-rest-client` extension may not work properly with RESTEasy Reactive. + diff --git a/docs/src/main/asciidoc/scheduler-reference.adoc b/docs/src/main/asciidoc/scheduler-reference.adoc index 4632a22bc01ba..a67d81e785c2e 100644 --- a/docs/src/main/asciidoc/scheduler-reference.adoc +++ b/docs/src/main/asciidoc/scheduler-reference.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Scheduler Reference Guide @@ -99,6 +99,15 @@ Sometimes a possibility to specify an explicit id may come in handy. void myMethod() { } ---- +If a value starts with `{` and ends with `}` then the scheduler attempts to find a corresponding config property and use the configured value instead. + +.Interval Config Property Example +[source,java] +---- +@Scheduled(identity = "{myMethod.identity.expr}") +void myMethod() { } +---- + === Delayed Execution `@Scheduled` provides two ways to delay the time a trigger should start firing at. @@ -182,7 +191,7 @@ class MyService { == Programmatic Scheduling -If you need to schedule a job programmatically you'll need to add the link:quartz[Quartz extension] and use the Quartz API direcly. +If you need to schedule a job programmatically you'll need to add the link:quartz[Quartz extension] and use the Quartz API directly. .Programmatic Scheduling with Quartz API [source,java] diff --git a/docs/src/main/asciidoc/scheduler.adoc b/docs/src/main/asciidoc/scheduler.adoc index 9fab6275c102f..d00951e91fc5a 100644 --- a/docs/src/main/asciidoc/scheduler.adoc +++ b/docs/src/main/asciidoc/scheduler.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Scheduling Periodic Tasks @@ -41,7 +41,7 @@ The solution is located in the `scheduler-quickstart` {quickstarts-tree-url}/sch First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ @@ -134,7 +134,7 @@ it should be invoked as soon as possible. 
The invocation of the scheduled method == Updating the application configuration file Edit the `application.properties` file and add the `cron.expr` configuration: -[source,shell] +[source,properties] ---- # By default, the syntax used for cron expressions is based on Quartz - http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html # You can change the syntax using the following property: @@ -210,7 +210,7 @@ After a few seconds, re-run `curl localhost:8080/count` to verify the counter ha Observe the console to verify that the message `Cron expression configured in application.properties` has been displayed indicating that the cron job using an expression configured in `application.properties` has been triggered. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. [[scheduler-configuration-reference]] diff --git a/docs/src/main/asciidoc/scripting.adoc b/docs/src/main/asciidoc/scripting.adoc new file mode 100644 index 0000000000000..c7b8c7f1e6caf --- /dev/null +++ b/docs/src/main/asciidoc/scripting.adoc @@ -0,0 +1,441 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Scripting with Quarkus +include::./attributes.adoc[] +:extension-status: preview + +Quarkus provides integration with https://jbang.dev[jbang] which allows you to write Java scripts/applications requiring no Maven nor Gradle to get running. + +In this guide, we will see how you can write a REST application using just a single Java file. + +include::./status-include.adoc[] + +== Prerequisites + +To complete this guide, you need: + +* less than 5 minutes +* https://jbang.dev/download[jbang v0.40.3+] +* an IDE +* GraalVM installed if you want to run in native mode + +== Solution + +Normally we would link to a Git repository to clone but in this case there is no additional files than the following: + +[source,java,subs=attributes+] +---- +//usr/bin/env jbang "$0" "$@" ; exit $? 
+//DEPS io.quarkus:quarkus-resteasy:{quarkus-version} +//JAVAC_OPTIONS -parameters +//JAVA_OPTIONS -Djava.util.logging.manager=org.jboss.logmanager.LogManager + +import io.quarkus.runtime.Quarkus; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import org.jboss.resteasy.annotations.jaxrs.PathParam; +import org.jboss.logging.Logger; + +@Path("/hello") +@ApplicationScoped +public class quarkusapp { + + @GET + public String sayHello() { + return "hello"; + } + + public static void main(String[] args) { + Quarkus.run(args); + } + + @Inject + GreetingService service; + + @GET + @Produces(MediaType.TEXT_PLAIN) + @Path("/greeting/{name}") + public String greeting(@PathParam String name) { + return service.greeting(name); + } + + @ApplicationScoped + static public class GreetingService { + + public String greeting(String name) { + return "hello " + name; + } + } +} +---- + +== Architecture + +In this guide, we create a straightforward application serving a `hello` endpoint with a single source file, no additional build files like `pom.xml` or `build.gradle` needed. To demonstrate dependency injection, this endpoint uses a `greeting` bean. + +image::getting-started-architecture.png[alt=Architecture] + +== Creating the initial file + +First, we need a Java file. jbang lets you create an initial version using: + +[source,bash,subs=attributes+] +---- +jbang init scripting/quarkusapp.java +cd scripting +---- + +This command generates a .java file that you can directly run on Linux and macOS, i.e. `./quarkusapp.java` - on Windows you need to use `jbang quarkusapp.java`. + +This initial version will print `Hello World` when run. + +Once generated, look at the `quarkusapp.java`. + +You will find at the top a line looking like this: + +[source,java] +---- +//usr/bin/env jbang "$0" "$@" ; exit $? +---- + +This line is what on Linux and macOS allows you to run it as a script. On Windows this line is ignored. + +The next line + +[source,java] +---- +// //DEPS +---- + +Is illustrating how you add dependencies to this script. This is a feature of `jbang`. + +Go ahead and update this line to include the `quarkus-resteasy` dependency like so: + +[source,java,subs=attributes+] +---- +//DEPS io.quarkus:quarkus-resteasy:{quarkus-version} +---- + +Now, run `jbang quarkusapp.java` and you will see `jbang` resolving this dependency and building the jar with help from Quarkus' jbang integration. + +[source,shell,subs=attributes+] +---- +$ jbang quarkusapp.java + +[jbang] Resolving dependencies... +[jbang] Resolving io.quarkus:quarkus-resteasy:{quarkus-version}...Done +[jbang] Dependencies resolved +[jbang] Building jar... +[jbang] Post build with io.quarkus.launcher.JBangIntegration +Aug 30, 2020 5:40:55 AM org.jboss.threads.Version +INFO: JBoss Threads version 3.1.1.Final +Aug 30, 2020 5:40:56 AM io.quarkus.deployment.QuarkusAugmentor run +INFO: Quarkus augmentation completed in 722ms +Hello World +---- + +For now the application does nothing new. + +[TIP] +.How do I edit this file and get content assist? +==== +As there is nothing but a `.java` file, most IDE's don't handle content assist well. +To work around that you can run `jbang edit quarkusapp.java`, this will print out a directory that will have a temporary project setup you can use in your IDE. + +On Linux/macOS you can run ` `jbang edit quarkusapp.java``. 
+ +If you add dependencies while editing you can get jbang to automatically refresh +the IDE project using `jbang edit --live= quarkusapp.java`. +==== + + +=== The JAX-RS resources + +Now let us replace the class with one that uses Quarkus features: + +[source,java] +---- +import io.quarkus.runtime.Quarkus; +import javax.enterprise.context.ApplicationScoped; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +@Path("/hello") +@ApplicationScoped +public class quarkusapp { + + @GET + public String sayHello() { + return "hello"; + } + + public static void main(String[] args) { + Quarkus.run(args); + } +} +---- + +It's a very simple class with a main method that starts Quarkus with a REST endpoint, returning "hello" to requests on "/hello". + +[TIP] +.Why is the `main` method there? +==== +A `main` method is currently needed for the `jbang` integration to work - we might remove this requirement in the future. +==== + +== Running the application + +Now when you run the application you will see Quarkus start up. + +Use: `jbang quarkusapp.java`: + +[source,shell,subs=attributes+] +---- +$ jbang quarkusapp.java + +[jbang] Building jar... +[jbang] Post build with io.quarkus.launcher.JBangIntegration +Aug 30, 2020 5:49:01 AM org.jboss.threads.Version +INFO: JBoss Threads version 3.1.1.Final +Aug 30, 2020 5:49:02 AM io.quarkus.deployment.QuarkusAugmentor run +INFO: Quarkus augmentation completed in 681ms +__ ____ __ _____ ___ __ ____ ______ + --/ __ \/ / / / _ | / _ \/ //_/ / / / __/ + -/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \ +--\___\_\____/_/ |_/_/|_/_/|_|\____/___/ +2020-08-30 05:49:03,255 INFO [io.quarkus] (main) Quarkus {quarkus-version} on JVM started in 0.638s. Listening on: http://0.0.0.0:8080 +2020-08-30 05:49:03,272 INFO [io.quarkus] (main) Profile prod activated. +2020-08-30 05:49:03,272 INFO [io.quarkus] (main) Installed features: [cdi, resteasy] +---- + +Once started, you can request the provided endpoint: + +[source,shell] +---- +$ curl -w "\n" http://localhost:8080/hello +hello +---- + +After that, hit `CTRL+C` to stop the application. + +[TIP] +.Automatically add newline with `curl -w "\n"` +==== +We are using `curl -w "\n"` in this example to avoid your terminal printing a '%' or put both result and next command prompt on the same line. +==== + +[TIP] +.Why is `quarkus-resteasy` not resolved? +==== +In this second run you should not see a line saying it is resolving `quarkus-resteasy` as jbang caches the dependency resolution between runs. +If you want to clear the caches to force resolution use `jbang cache clear`. +==== + +== Using injection + +Dependency injection in Quarkus is based on ArC which is a CDI-based dependency injection solution tailored for Quarkus' architecture. +You can learn more about it in the link:cdi-reference[Contexts and Dependency Injection guide]. + +ArC comes as a dependency of `quarkus-resteasy` so you already have it handy. + +Let's modify the application and add a companion bean. + +Normally you would add a separate class, but as we are aiming to have it all in one file you will add a +nested class. + +Add the following *inside* the `quarkusapp` class body. + +[source, java] +---- +@ApplicationScoped +static public class GreetingService { + + public String greeting(String name) { + return "hello " + name; + } + +} +---- + +[TIP] +.Use of nested static public classes +==== +We are using a nested static public class instead of a top level class for two reasons: + +. jbang currently does not support multiple source files. +. 
All Java frameworks relying on introspection have challenges using top level classes as they are not as visible as public classes; and in Java there can only be one top level public class in a file. + +==== + +Edit the `quarksapp` class to inject the `GreetingService` and create a new endpoint using it, you should end up with something like: + +[source,java,subs=attributes+] +---- +//usr/bin/env jbang "$0" "$@" ; exit $? +//DEPS io.quarkus:quarkus-resteasy:{quarkus-version} + +import io.quarkus.runtime.Quarkus; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import org.jboss.resteasy.annotations.jaxrs.PathParam; + +@Path("/hello") +@ApplicationScoped +public class quarkusapp { + + @GET + public String sayHello() { + return "hello from Quarkus with jbang.dev"; + } + + public static void main(String[] args) { + Quarkus.run(args); + } + + @Inject + GreetingService service; + + @GET + @Produces(MediaType.TEXT_PLAIN) + @Path("/greeting/{name}") + public String greeting(@PathParam String name) { + return service.greeting(name); + } + + @ApplicationScoped + static public class GreetingService { + + public String greeting(String name) { + return "hello " + name; + } + } +} +---- + +Now when you run `jbang quarkusapp.java` you can check what the new end point returns: + +[source,shell,subs=attributes+] +---- +$ curl -w "\n" http://localhost:8080/hello/greeting/quarkus +hello null +---- + +Now that is unexpected, why is it returning `hello null` and not `hello quarkus`? + +The reason is that JAX-RS `@PathParam` relies on the `-parameters` compiler flag to be set to be able to map `{name}` to the `name` parameter. + +We fix that by adding the following comment instruction to the file: + +[source,java,subs=attributes+] +---- +//JAVAC_OPTIONS -parameters +---- + +Now when you run with `jbang quarkusapp.java` the end point should return what you expect: + +[source,shell,subs=attributes+] +---- +$ curl -w "\n" http://localhost:8080/hello/greeting/quarkus +hello quarkus +---- + +== Debugging + +To debug the application you use `jbang --debug quarkusapp.java` and you can use your IDE to connect on port 4004; if you want to use the +more traditional Quarkus debug port you can use `jbang --debug=5005 quarkusapp.java`. + +Note: `jbang` debugging always suspends thus you need to connect the debugger to have the application run. + +== Logging + +To use logging in Quarkus scripting with jbang you do as usual, with configuring a logger, i.e. + +[source,java] +---- +public static final Logger LOG = Logger.getLogger(quarkusapp.class); +---- + +To get it to work you need to add a Java option to ensure the logging is initialized properly, i.e. + +[source,java] +---- +//JAVA_OPTIONS -Djava.util.logging.manager=org.jboss.logmanager.LogManager +---- + +With that in place running `jbang quarkusapp.java` will log and render as expected. + +== Configuring Application + +You can use `//Q:CONFIG =` to set up static configuration for your application. + +I.e. 
if you wanted to add the `smallrye-openapi` and `swagger-ui` extensions and have the Swagger UI always show up you would add the following: + +[source,java,subs=attributes+] +---- +//DEPS io.quarkus:quarkus-smallrye-openapi:{quarkus-version} +//DEPS io.quarkus:quarkus-swagger-ui:{quarkus-version} +//Q:CONFIG quarkus.swagger-ui.always-include=true +---- + +Now during build the `quarkus.swagger-ui.always-include` will be generated into the resulting jar and `http://0.0.0.0:8080/q/swagger-ui` will be available when run. + +== Running as a native application + +If you have the `native-image` binary installed and `GRAALVM_HOME` set, you can get the native executable built and run using `jbang --native quarkusapp.java`: + +[source,shell,subs=attributes+] +---- +$ jbang --native quarkusapp + +[jbang] Building jar... +[jbang] Post build with io.quarkus.launcher.JBangIntegration +Aug 30, 2020 6:21:15 AM org.jboss.threads.Version +INFO: JBoss Threads version 3.1.1.Final +Aug 30, 2020 6:21:16 AM io.quarkus.deployment.pkg.steps.JarResultBuildStep buildNativeImageThinJar +INFO: Building native image source jar: /var/folders/yb/sytszfld4sg8vwr1h0w20jlw0000gn/T/quarkus-jbang3291688251685023074/quarkus-application-native-image-source-jar/quarkus-application-runner.jar +Aug 30, 2020 6:21:16 AM io.quarkus.deployment.pkg.steps.NativeImageBuildStep build +INFO: Building native image from /var/folders/yb/sytszfld4sg8vwr1h0w20jlw0000gn/T/quarkus-jbang3291688251685023074/quarkus-application-native-image-source-jar/quarkus-application-runner.jar +Aug 30, 2020 6:21:16 AM io.quarkus.deployment.pkg.steps.NativeImageBuildStep checkGraalVMVersion +INFO: Running Quarkus native-image plugin on GraalVM Version 20.1.0 (Java Version 11.0.7) +Aug 30, 2020 6:21:16 AM io.quarkus.deployment.pkg.steps.NativeImageBuildStep build +INFO: /Users/max/.sdkman/candidates/java/20.1.0.r11-grl/bin/native-image -J-Djava.util.logging.manager=org.jboss.logmanager.LogManager -J-Dsun.nio.ch.maxUpdateArraySize=100 -J-Dvertx.logger-delegate-factory-class-name=io.quarkus.vertx.core.runtime.VertxLogDelegateFactory -J-Dvertx.disableDnsResolver=true -J-Dio.netty.leakDetection.level=DISABLED -J-Dio.netty.allocator.maxOrder=1 -J-Duser.language=en -J-Dfile.encoding=UTF-8 --initialize-at-build-time= -H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy\$BySpaceAndTime -H:+JNI -jar quarkus-application-runner.jar -H:FallbackThreshold=0 -H:+ReportExceptionStackTraces -H:-AddAllCharsets -H:EnableURLProtocols=http --no-server -H:-UseServiceLoaderFeature -H:+StackTrace quarkus-application-runner + +Aug 30, 2020 6:22:31 AM io.quarkus.deployment.QuarkusAugmentor run +INFO: Quarkus augmentation completed in 76010ms +__ ____ __ _____ ___ __ ____ ______ + --/ __ \/ / / / _ | / _ \/ //_/ / / / __/ + -/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \ +--\___\_\____/_/ |_/_/|_/_/|_|\____/___/ +2020-08-30 06:22:32,012 INFO [io.quarkus] (main) Quarkus {quarkus-version} native started in 0.017s. Listening on: http://0.0.0.0:8080 +2020-08-30 06:22:32,013 INFO [io.quarkus] (main) Profile prod activated. 
+2020-08-30 06:22:32,013 INFO [io.quarkus] (main) Installed features: [cdi, resteasy] +---- + +This native build will take some time on first run but any subsequent runs (without changing `quarkusapp.java`) will be close to instant thanks to jbang cache: + +[source,shell,subs=attributes+] +---- +$ jbang --native quarkusapp.java +__ ____ __ _____ ___ __ ____ ______ + --/ __ \/ / / / _ | / _ \/ //_/ / / / __/ + -/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \ +--\___\_\____/_/ |_/_/|_/_/|_|\____/___/ +2020-08-30 06:23:36,846 INFO [io.quarkus] (main) Quarkus {quarkus-version} native started in 0.015s. Listening on: http://0.0.0.0:8080 +2020-08-30 06:23:36,846 INFO [io.quarkus] (main) Profile prod activated. +2020-08-30 06:23:36,846 INFO [io.quarkus] (main) Installed features: [cdi, resteasy] +---- + +=== Conclusion + +If you want to get started with Quarkus or write something quickly, Quarkus Scripting with jbang lets you do that. No Maven, no Gradle - just a Java file. In this guide we outlined the very basics on using Quarkus with jbang; if you want to learn more about what jbang can do, go see https://jbang.dev. diff --git a/docs/src/main/asciidoc/security-authorization.adoc b/docs/src/main/asciidoc/security-authorization.adoc index 3eb27fb1de816..fde71e04b0bd4 100644 --- a/docs/src/main/asciidoc/security-authorization.adoc +++ b/docs/src/main/asciidoc/security-authorization.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Authorization of Web Endpoints @@ -152,6 +152,20 @@ That is if the security annotations do not define the access control. Defaults t and JAX-RS endpoints that do not have security annotations but are defined in classes that contain methods with security annotations. Defaults to `false`. +=== Disabling permissions + +Permissions can be disabled at build time with an `enabled` property for each declared permission, for example: + +[source,properties] +---- +quarkus.http.auth.permission.permit1.enabled=false +quarkus.http.auth.permission.permit1.paths=/public/*,/css/*,/js/*,/robots.txt +quarkus.http.auth.permission.permit1.policy=permit +quarkus.http.auth.permission.permit1.methods=GET,HEAD +---- + +and enabled at runtime with a system property or environment variable, for example: `-Dquarkus.http.auth.permission.permit1.enabled=true`. + [#standard-security-annotations] == Authorization using Annotations @@ -211,3 +225,7 @@ public class SubjectExposingResource { <3> The `/subject/unsecured` endpoint allows for unauthenticated access by specifying the `@PermitAll` annotation. <4> This call to obtain the user principal will return null if the caller is unauthenticated, non-null if the caller is authenticated. <5> The `/subject/denied` endpoint disallows any access regardless of whether the call is authenticated by specifying the `@DenyAll` annotation. 
+ +== References + +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-built-in-authentication.adoc b/docs/src/main/asciidoc/security-built-in-authentication.adoc index 4c43d67acd3d0..eb7fd2bc21d83 100644 --- a/docs/src/main/asciidoc/security-built-in-authentication.adoc +++ b/docs/src/main/asciidoc/security-built-in-authentication.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Built-In Authentication Support @@ -17,6 +17,8 @@ that provides a username/password based `IdentityProvider`, such as link:securit Please see link:security#identity-providers[Security Identity Providers] for more information. +Please also see link:security-testing#configuring-user-information[Configuring User Information in application.properties] section. + [[form-auth]] == Form Based Authentication @@ -28,7 +30,7 @@ be read by all members of the cluster (provided they all share the same encrypti The encryption key can be set using the `quarkus.http.auth.session.encryption-key` property, and it must be at least 16 characters long. This key is hashed using SHA-256 and the resulting digest is used as a key for AES-256 encryption of the cookie value. This cookie contains an expiry time as part of the encrypted value, so all nodes in the cluster must have their -clocks synchronised. At one minute intervals a new cookie will be generated with an updated expiry time if the session +clocks synchronized. At one minute intervals a new cookie will be generated with an updated expiry time if the session is in use. The following properties can be used to configure form based auth: @@ -40,7 +42,7 @@ include::{generated-dir}/config/quarkus-vertx-http-config-group-form-auth-config Quarkus provides mTLS authentication so that you can authenticate users based on their X.509 certificates. -To use this authentication method, you should first enable SSL for your application. For more details, check the link:http-reference[Supporting secure connections with SSL] guide. +To use this authentication method, you should first enable SSL for your application. For more details, check the link:http-reference#ssl[Supporting secure connections with SSL] guide. Once your application is accepting secure connections, the next step is to configure a `quarkus.http.ssl.certificate.trust-store-file` holding all the certificates that your application should trust as well as how your application should ask for certificates when @@ -87,10 +89,18 @@ You should also be able to get the certificate as follows: .Obtaining the certificate [source,java] ---- +import java.security.cert.X509Certificate; +import io.quarkus.security.credential.CertificateCredential; + CertificateCredential credential = identity.getCredential(CertificateCredential.class); X509Certificate certificate = credential.getCertificate(); ---- +=== Authorization + +The information from the client certificate can be used to enhance Quarkus `SecurityIdentity`. For example, one can add new roles after checking a client certificate subject name, etc. +Please see the link:security-customization#security-identity-customization[SecurityIdentity Customization] section for more information about customizing Quarkus `SecurityIdentity`. 
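+
+As a minimal sketch (assuming a custom augmentor has already added a `user` role based on the certificate subject; the `HelloResource` endpoint below is a hypothetical example, not part of this guide), such a role can then be enforced with the standard security annotations:
+
+[source,java]
+----
+import javax.annotation.security.RolesAllowed;
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+import io.quarkus.security.identity.SecurityIdentity;
+
+@Path("/hello")
+public class HelloResource {
+
+    // SecurityIdentity is built from the client certificate by the mTLS mechanism
+    @Inject
+    SecurityIdentity identity;
+
+    @GET
+    @RolesAllowed("user") // assumes a custom SecurityIdentityAugmentor added the "user" role
+    public String hello() {
+        // For mTLS the principal name is derived from the certificate subject
+        return "Hello " + identity.getPrincipal().getName();
+    }
+}
+----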
+ [[proactive-authentication]] == Proactive Authentication @@ -98,5 +108,26 @@ By default Quarkus does what we call proactive authentication. This means that i credential then that request will always be authenticated (even if the target page does not require authentication). This means that requests with an invalid credential will always be rejected, even for public pages. You can change -this behaviour and only authenticate when required by setting `quarkus.http.auth.proactive=false`. +this behavior and only authenticate when required by setting `quarkus.http.auth.proactive=false`. + +If you disable proactive authentication then the authentication process will only be run when an identity is requested, +either because there are security rules that requires the user to be authenticated, or due to programatic access to the +current identity. + +Note that if proactive authentication is in use accessing the `SecurityIdentity` is a blocking operation. This is because +authentication may not have happened yet, and accessing it may require calls to external systems such as databases that +may block. For blocking applications this is no problem, however if you are have disabled authentication in a reactive +application this will fail (as you cannot do blocking operations on the IO thread). To work around this you need to +`@Inject` an instance of `io.quarkus.security.identity.CurrentIdentityAssociation`, and call the +`Uni getDeferredIdentity();` method. You can then subscribe to the resulting `Uni` and will be notified +when authentication is complete and the identity is available. + +[NOTE] +==== +By default the authentication security constraints are enforced before the JAX-RS chain starts. +Disabling the proactive authentication effectively shifts this process to the moment when the JAX-RS chain starts running thus making it possible to use JAX-RS `ExceptionMapper` to capture Quarkus Security authentication exceptions such as `io.quarkus.security.AuthenticationFailedException` if required. +==== + +== References +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-customization.adoc b/docs/src/main/asciidoc/security-customization.adoc index 458dcac1034a1..07b588c00a78b 100644 --- a/docs/src/main/asciidoc/security-customization.adoc +++ b/docs/src/main/asciidoc/security-customization.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Security Tips and Tricks @@ -48,6 +48,7 @@ public class CustomAwareJWTAuthMechanism implements HttpAuthenticationMechanism } ---- +[[security-identity-customization]] == Security Identity Customization Internally, the identity providers create and update an instance of the `io.quarkus.security.identity.SecurityIdentity` class which holds the principal, roles, credentials which were used to authenticate the client (user) and other security attributes. An easy option to customize `SecurityIdentity` is to register a custom `SecurityIdentityAugmentor`. 
For example, the augmentor below adds an addition role: @@ -66,26 +67,20 @@ import java.util.function.Supplier; @ApplicationScoped public class RolesAugmentor implements SecurityIdentityAugmentor { - @Override - public int priority() { - return 0; - } - @Override public Uni augment(SecurityIdentity identity, AuthenticationRequestContext context) { - return context.runBlocking(build(identity)); + return Uni.createFrom().item(build(identity)); + + // Do 'return context.runBlocking(build(identity));' + // if a blocking call is required to customize the identity } private Supplier build(SecurityIdentity identity) { if(identity.isAnonymous()) { return () -> identity; } else { - // create a new builder and copy principal, attributes, credentials and roles from the original - QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder() - .setPrincipal(identity.getPrincipal()) - .addAttributes(identity.getAttributes()) - .addCredentials(identity.getCredentials()) - .addRoles(identity.getRoles()); + // create a new builder and copy principal, attributes, credentials and roles from the original identity + QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder(identity); // add custom role source here builder.addRole("dummy"); @@ -95,6 +90,62 @@ public class RolesAugmentor implements SecurityIdentityAugmentor { } ---- +Here is another example showing how to use the client certificate available in the current link:security-built-in-authentication#mutual-tls[Mutual TLS] request to add more roles: + +[source,java] +---- +import java.security.cert.X509Certificate; +import io.quarkus.security.credential.CertificateCredential; +import io.quarkus.security.identity.AuthenticationRequestContext; +import io.quarkus.security.identity.SecurityIdentity; +import io.quarkus.security.identity.SecurityIdentityAugmentor; +import io.quarkus.security.runtime.QuarkusSecurityIdentity; +import io.smallrye.mutiny.Uni; + +import javax.enterprise.context.ApplicationScoped; +import java.util.function.Supplier; +import java.util.Set; + +@ApplicationScoped +public class RolesAugmentor implements SecurityIdentityAugmentor { + + @Override + public Uni augment(SecurityIdentity identity, AuthenticationRequestContext context) { + return Uni.createFrom().item(build(identity)); + } + + private Supplier build(SecurityIdentity identity) { + // create a new builder and copy principal, attributes, credentials and roles from the original identity + QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder(identity); + + CertificateCredential certificate = identity.getCredential(CertificateCredential.class); + if (certificate != null) { + builder.addRoles(extractRoles(certificate.getCertificate())); + } + return builder::build; + } + + private Set extractRoles(X509Certificate certificate) { + String name = certificate.getSubjectX500Principal().getName(); + + switch (name) { + case "CN=client": + return Collections.singleton("user"); + case "CN=guest-client": + return Collections.singleton("guest"); + default: + return Collections.emptySet(); + } + } +} +---- + +[NOTE] +=== +If more than one custom `SecurityIdentityAugmentor` is registered then they will be considered equal candidates and invoked in random order. +You can enforce the order by implementing a default `SecurityIdentityAugmentor#priority` method. Augmentors with higher priorities will be invoked first. 
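+For example, a minimal sketch of overriding the priority (the class name `HighPriorityAugmentor`, the value `100` and the no-op body are illustrative assumptions):
+
+[source,java]
+----
+import io.quarkus.security.identity.AuthenticationRequestContext;
+import io.quarkus.security.identity.SecurityIdentity;
+import io.quarkus.security.identity.SecurityIdentityAugmentor;
+import io.smallrye.mutiny.Uni;
+
+import javax.enterprise.context.ApplicationScoped;
+
+@ApplicationScoped
+public class HighPriorityAugmentor implements SecurityIdentityAugmentor {
+
+    @Override
+    public int priority() {
+        // Augmentors with higher priority values are invoked first
+        return 100;
+    }
+
+    @Override
+    public Uni<SecurityIdentity> augment(SecurityIdentity identity, AuthenticationRequestContext context) {
+        // No-op in this sketch; a real augmentor would add roles or attributes here
+        return Uni.createFrom().item(identity);
+    }
+}
+----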
+=== + == Custom JAX-RS SecurityContext If you use JAX-RS `ContainerRequestFilter` to set a custom JAX-RS `SecurityContext` then make sure `ContainerRequestFilter` runs in the JAX-RS pre-match phase by adding a `@PreMatching` annotation to it for this custom security context to be linked with Quarkus `SecurityIdentity`, for example: @@ -159,18 +210,20 @@ If you have a good reason to disable the authorization (for example, when testin @Priority(Interceptor.Priority.LIBRARY_AFTER) @ApplicationScoped public class DisabledAuthController extends AuthorizationController { - @ConfigProperty(name = "disable.authorization") + @ConfigProperty(name = "disable.authorization", defaultValue = "false") boolean disableAuthorization; @Override public boolean isAuthorizationEnabled() { - return disableAuthorization; + return !disableAuthorization; } } ---- == Registering Security Providers +=== Default providers + When running in native mode, the default behavior for GraalVM native executable generation is to only include the main "SUN" provider unless you have enabled SSL, in which case all security providers are registered. If you are not using SSL, then you can selectively register security providers by name using the `quarkus.security.security-providers` property. The following example illustrates @@ -182,6 +235,117 @@ configuration to register the "SunRsaSign" and "SunJCE" security providers: quarkus.security.security-providers=SunRsaSign,SunJCE ---- +=== BouncyCastle + +If you need to register an `org.bouncycastle.jce.provider.BouncyCastleProvider` JCE provider then please set a `BC` provider name: + +.Example Security Providers BouncyCastle Configuration +[source,properties] +---- +quarkus.security.security-providers=BC +---- + +and add the BouncyCastle provider dependency: + +[source,xml] +---- + + org.bouncycastle + bcprov-jdk15on + +---- + +=== BouncyCastle JSSE + +If you need to register an `org.bouncycastle.jsse.provider.BouncyCastleJsseProvider` JSSE provider and use it instead of the default SunJSSE provider then please set a `BCJSSE` provider name: + +.Example Security Providers BouncyCastle JSSE Configuration +[source,properties] +---- +quarkus.security.security-providers=BCJSSE + +quarkus.http.ssl.client-auth=REQUIRED + +quarkus.http.ssl.certificate.key-store-file=server-keystore.jks +quarkus.http.ssl.certificate.key-store-password=password +quarkus.http.ssl.certificate.trust-store-file=server-truststore.jks +quarkus.http.ssl.certificate.trust-store-password=password +---- + +and add the BouncyCastle TLS dependency: + +[source,xml] +---- + + org.bouncycastle + bctls-jdk15on + +---- + +=== BouncyCastle FIPS + +If you need to register an `org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider` JCE provider then please set a `BCFIPS` provider name: + +.Example Security Providers BouncyCastle FIPS Configuration +[source,properties] +---- +quarkus.security.security-providers=BCFIPS +---- + +and add the BouncyCastle FIPS provider dependency: + +[source,xml] +---- + + org.bouncycastle + bc-fips + +---- + +=== BouncyCastle JSSE FIPS + +If you need to register an `org.bouncycastle.jsse.provider.BouncyCastleJsseProvider` JSSE provider and use it in combination with `org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider` instead of the default SunJSSE provider then please set a `BCFIPSJSSE` provider name: + +.Example Security Providers BouncyCastle FIPS JSSE Configuration +[source,properties] +---- +quarkus.security.security-providers=BCFIPSJSSE + +quarkus.http.ssl.client-auth=REQUIRED + 
+quarkus.http.ssl.certificate.key-store-file=server-keystore.jks +quarkus.http.ssl.certificate.key-store-password=password +quarkus.http.ssl.certificate.key-store-file-type=BCFKS +quarkus.http.ssl.certificate.key-store-provider=BCFIPS +quarkus.http.ssl.certificate.trust-store-file=server-truststore.jks +quarkus.http.ssl.certificate.trust-store-password=password +quarkus.http.ssl.certificate.trust-store-file-type=BCFKS +quarkus.http.ssl.certificate.trust-store-provider=BCFIPS +---- + +and the BouncyCastle TLS dependency optimized for using the BouncyCastle FIPS provider: + +[source,xml] +---- + + org.bouncycastle + bctls-fips + + + + org.bouncycastle + bc-fips + +---- + +Note that the keystore and truststore type and provider are set to `BCFKS` and `BCFIPS`. +One can generate a keystore with this type and provider like this: + +[source,shell] +---- +keytool -genkey -alias server -keyalg RSA -keystore server-keystore.jks -keysize 2048 -keypass password -provider org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider -providerpath $PATH_TO_BC_FIPS_JAR -storetype BCFKS +---- + == Reactive Security If you are going to use security in a reactive environment, you will likely need SmallRye Context Propagation: @@ -196,6 +360,9 @@ If you are going to use security in a reactive environment, you will likely need This will allow you to propagate the identity throughout the reactive callbacks. You also need to make sure you are using an executor that is capable of propagating the identity (e.g. no `CompletableFuture.supplyAsync`), -to make sure that quarkus can propagate it. For more information see the +to make sure that Quarkus can propagate it. For more information see the link:context-propagation[Context Propagation Guide]. +== References + +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-jdbc.adoc b/docs/src/main/asciidoc/security-jdbc.adoc index 50c0ccfb32243..23fb2b79f345d 100644 --- a/docs/src/main/asciidoc/security-jdbc.adoc +++ b/docs/src/main/asciidoc/security-jdbc.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Security with JDBC @@ -44,12 +44,13 @@ The solution is located in the `security-jdbc-quickstart` {quickstarts-tree-url} First, we need a new project. 
Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-jdbc-quickstart \ - -Dextensions="elytron-security-jdbc, jdbc-postgresql, resteasy" + -Dextensions="elytron-security-jdbc,jdbc-postgresql,resteasy" \ + -DnoExamples cd security-jdbc-quickstart ---- @@ -141,9 +142,7 @@ import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; import javax.ws.rs.core.SecurityContext; @Path("/api/users") @@ -152,7 +151,6 @@ public class UserResource { @GET @RolesAllowed("user") @Path("/me") - @Produces(MediaType.APPLICATION_JSON) public String me(@Context SecurityContext securityContext) { return securityContext.getUserPrincipal().getName(); } @@ -171,7 +169,7 @@ quarkus.datasource.password=quarkus quarkus.datasource.jdbc.url=jdbc:postgresql:elytron-security-jdbc ---- -In our context, we are using PostgreSQL as identity store and we init the database with users and roles. +In our context, we are using PostgreSQL as identity store and we initialize the database with users and roles. [source,sql] ---- @@ -257,6 +255,7 @@ By providing the `admin:admin` credentials, the extension authenticated the user The `admin` user is authorized to access to the protected resources. The user `admin` should be forbidden to access a resource protected with `@RolesAllowed("user")` because it doesn't have this role. + [source,shell] ---- $ curl -i -X GET -u admin:admin http://localhost:8080/api/users/me @@ -268,9 +267,10 @@ Forbidden% ---- Finally, using the user `user` works and the security context contains the principal details (username for instance). + [source,shell] ---- -curl -i -X GET -u user:user http://localhost:8080/api/users/me +$ curl -i -X GET -u user:user http://localhost:8080/api/users/me HTTP/1.1 200 OK Content-Length: 4 Content-Type: text/plain;charset=UTF-8 @@ -310,7 +310,6 @@ quarkus.security.jdbc.principal-query.roles.attribute-mappings.0.to=groups include::{generated-dir}/config/quarkus-elytron-security-jdbc.adoc[opts=optional, leveloffset=+1] -== Future Work +== References -* Propose more password mappers. -* Provide an opinionated configuration. +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-jpa.adoc b/docs/src/main/asciidoc/security-jpa.adoc index a00fd77f9dfec..acacf06d410a3 100644 --- a/docs/src/main/asciidoc/security-jpa.adoc +++ b/docs/src/main/asciidoc/security-jpa.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Security with JPA @@ -45,12 +45,13 @@ The solution is located in the `security-jpa-quickstart` {quickstarts-tree-url}/ First, we need a new project. 
Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-jpa-quickstart \ - -Dextensions="security-jpa, jdbc-postgresql, resteasy, hibernate-orm-panache" + -Dextensions="security-jpa,jdbc-postgresql,resteasy,hibernate-orm-panache" \ + -DnoExamples cd security-jpa-quickstart ---- @@ -142,9 +143,7 @@ import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; import javax.ws.rs.core.SecurityContext; @Path("/api/users") @@ -153,7 +152,6 @@ public class UserResource { @GET @RolesAllowed("user") @Path("/me") - @Produces(MediaType.APPLICATION_JSON) public String me(@Context SecurityContext securityContext) { return securityContext.getUserPrincipal().getName(); } @@ -210,7 +208,7 @@ The `security-jpa` extension is only initialized if there is a single entity ann <1> This annotation must be present on a single entity. It can be a regular Hibernate ORM entity or a Hibernate ORM with Panache entity as in this example. <2> This indicates the field used for the user name. -<3> This indicates the field used for the password. This defaults to using bcrypt hashed passwords, but you can also configure it for clear text passwords. +<3> This indicates the field used for the password. This defaults to using bcrypt hashed passwords, but you can also configure it for clear text passwords or custom passwords. <4> This indicates the comma-separated list of roles added to the target Principal representation attributes. <5> This method allows us to add users while hashing the password with the proper bcrypt hash. @@ -229,7 +227,7 @@ quarkus.hibernate-orm.database.generation=drop-and-create ---- In our context, we are using PostgreSQL as identity store. The database schema is created by Hibernate ORM automatically -on startup (change this in production) and we initialise the database with users and roles in the `Startup` class: +on startup (change this in production) and we initialize the database with users and roles in the `Startup` class: [source,java] ---- @@ -262,6 +260,11 @@ As a result, the `security-jpa` defaults to using bcrypt-hashed passwords. == Testing the Application +[NOTE] +==== +In the following tests we use the basic authentication mechanism, you can enable it by setting `quarkus.http.auth.basic=true` in the `application.properties` file. +==== + The application is now protected and the identities are provided by our database. The very first thing to check is to ensure the anonymous access works. @@ -302,6 +305,7 @@ By providing the `admin:admin` credentials, the extension authenticated the user The `admin` user is authorized to access to the protected resources. The user `admin` should be forbidden to access a resource protected with `@RolesAllowed("user")` because it doesn't have this role. + [source,shell] ---- $ curl -i -X GET -u admin:admin http://localhost:8080/api/users/me @@ -313,9 +317,10 @@ Forbidden% ---- Finally, using the user `user` works and the security context contains the principal details (username for instance). 
+ [source,shell] ---- -curl -i -X GET -u user:user http://localhost:8080/api/users/me +$ curl -i -X GET -u user:user http://localhost:8080/api/users/me HTTP/1.1 200 OK Content-Length: 4 Content-Type: text/plain;charset=UTF-8 @@ -371,8 +376,45 @@ When you need to create such a hashed password we provide the convenient `String function, which defaults to creating a random salt and hashing in 10 iterations (though you can specify the iterations and salt too). -NOTE: with MCF you don't need dedicated columns to store the hashing algo, the iterations count or the salt because +NOTE: with MCF you don't need dedicated columns to store the hashing algorithm, the iterations count or the salt because they're all stored in the hashed value. +You also have the possibility to store password using different hashing algorithm `@Password(value = PasswordType.CUSTOM, provider = CustomPasswordProvider.class)`: + +[source,java] +---- +@UserDefinition +@Table(name = "test_user") +@Entity +public class CustomPasswordUserEntity { + @Id + @GeneratedValue + public Long id; + + @Column(name = "username") + @Username + public String name; + + @Column(name = "password") + @Password(value = PasswordType.CUSTOM, provider = CustomPasswordProvider.class) + public String pass; + + @Roles + public String role; +} + +public class CustomPasswordProvider implements PasswordProvider { + @Override + public Password getPassword(String pass) { + byte[] digest = DatatypeConverter.parseHexBinary(pass); + return SimpleDigestPassword.createRaw(SimpleDigestPassword.ALGORITHM_SIMPLE_DIGEST_SHA_256, digest); + } +} +---- + WARN: you can also store passwords in clear text with `@Password(PasswordType.CLEAR)` but we strongly recommend against it in production. + +== References + +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-jwt.adoc b/docs/src/main/asciidoc/security-jwt.adoc index 89ad1a4c26de7..b8dd9b340296b 100644 --- a/docs/src/main/asciidoc/security-jwt.adoc +++ b/docs/src/main/asciidoc/security-jwt.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using JWT RBAC @@ -27,14 +27,14 @@ The solution is located in the `security-jwt-quickstart` {quickstarts-tree-url}/ First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-jwt-quickstart \ -DclassName="org.acme.security.jwt.TokenSecuredResource" \ -Dpath="/secured" \ - -Dextensions="resteasy-jsonb, jwt" + -Dextensions="resteasy,resteasy-jackson,smallrye-jwt,smallrye-jwt-build" cd security-jwt-quickstart ---- @@ -45,7 +45,7 @@ to your project by running the following command in your project base directory: [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="smallrye-jwt" +./mvnw quarkus:add-extension -Dextensions="smallrye-jwt, smallrye-jwt-build" ---- This will add the following to your `pom.xml`: @@ -56,6 +56,10 @@ This will add the following to your `pom.xml`: io.quarkus quarkus-smallrye-jwt + + io.quarkus + quarkus-smallrye-jwt-build + ---- === Examine the JAX-RS resource @@ -156,7 +160,7 @@ will produce undesirable behavior since JWT claims are naturally request scoped. 
Now we are ready to run our application. Use: -[source,shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -170,13 +174,13 @@ $ ./mvnw compile quarkus:dev [INFO] Scanning for projects... [INFO] [INFO] ----------------------< org.acme:security-jwt-quickstart >----------------------- -[INFO] Building security-jwt-quickstart 1.0-SNAPSHOT +[INFO] Building security-jwt-quickstart 1.0.0-SNAPSHOT [INFO] --------------------------------[ jar ]--------------------------------- ... Listening for transport dt_socket at address: 5005 -2020-07-15 16:09:50,883 INFO [io.quarkus] (Quarkus Main Thread) security-jwt-quickstart 1.0-SNAPSHOT on JVM (powered by Quarkus 999-SNAPSHOT) started in 1.073s. Listening on: http://0.0.0.0:8080 +2020-07-15 16:09:50,883 INFO [io.quarkus] (Quarkus Main Thread) security-jwt-quickstart 1.0.0-SNAPSHOT on JVM (powered by Quarkus 999-SNAPSHOT) started in 1.073s. Listening on: http://0.0.0.0:8080 2020-07-15 16:09:50,885 INFO [io.quarkus] (Quarkus Main Thread) Profile dev activated. Live Coding activated. -2020-07-15 16:09:50,885 INFO [io.quarkus] (Quarkus Main Thread) Installed features: [cdi, mutiny, resteasy, resteasy-jsonb, security, smallrye-context-propagation, smallrye-jwt, vertx, vertx-web] +2020-07-15 16:09:50,885 INFO [io.quarkus] (Quarkus Main Thread) Installed features: [cdi, mutiny, resteasy, resteasy-jackson, security, smallrye-context-propagation, smallrye-jwt, vertx, vertx-web] ---- Now that the REST endpoint is running, we can access it using a command line tool like curl: @@ -303,10 +307,10 @@ Create a `security-jwt-quickstart/src/main/resources/application.properties` wit [source, properties] ---- mp.jwt.verify.publickey.location=META-INF/resources/publicKey.pem #<1> -mp.jwt.verify.issuer=https://quarkus.io/using-jwt-rbac #<2> +mp.jwt.verify.issuer=https://example.com/issuer #<2> ---- <1> We are setting public key location to point to a classpath publicKey.pem resource location. We will add this key in part B, <>. -<2> We are setting the issuer to the URL string `https://quarkus.io/using-jwt-rbac`. +<2> We are setting the issuer to the URL string `https://example.com/issuer`. === Adding a Public Key @@ -357,7 +361,7 @@ public class GenerateToken { */ public static void main(String[] args) { String token = - Jwt.issuer("https://quarkus.io/using-jwt-rbac") // <1> + Jwt.issuer("https://example.com/issuer") // <1> .upn("jdoe@quarkus.io") // <2> .groups(new HashSet<>(Arrays.asList("User", "Admin"))) // <3> .claim(Claims.birthdate.name(), "2001-07-13") // <4> @@ -374,7 +378,7 @@ in order for the token to be accepted as valid. <3> The `group` claim provides the groups and top-level roles associated with the JWT bearer. <4> The `birthday` claim. It can be considered to be a sensitive claim so you may want to consider encrypting the claims, see <>. -Note for this code to work we need the content of the RSA private key that corresponds to the public key we have in the TokenSecuredResource application. Take the following PEM content and place it into `security-jwt-quickstart/src/main/resources/META-INF/resources/privateKey.pem`: +Note for this code to work we need the content of the RSA private key that corresponds to the public key we have in the TokenSecuredResource application. 
Take the following PEM content and place it into `security-jwt-quickstart/src/test/resources/privateKey.pem`: .RSA Private Key PEM Content [source, text] @@ -411,6 +415,29 @@ f3cg+fr8aou7pr9SHhJlZCU= We will use a `smallrye.jwt.sign.key-location` property to point to this private signing key. +[NOTE] +.Generating Keys with OpenSSL +==== +It is also possible to generate a public and private key pair using the OpenSSL command line tool. + +.openssl commands for generating keys +[source, text] +---- +openssl genrsa -out rsaPrivateKey.pem 2048 +openssl rsa -pubout -in rsaPrivateKey.pem -out publicKey.pem +---- + +An additional step is needed for generating the private key for converting it into the PKCS#8 format. + +.openssl command for converting private key +[source, text] +---- +openssl pkcs8 -topk8 -nocrypt -inform pem -in rsaPrivateKey.pem -outform pem -out privateKey.pem +---- + +You can use the generated pair of keys instead of the keys used in this quickstart. +==== + Now we can generate a JWT to use with `TokenSecuredResource` endpoint. To do this, run the following command: .Command to Generate JWT @@ -429,7 +456,7 @@ First part - JWT headers, second part - JWT claims, third part - JWT signature. == Finally, Secured Access to /secured/roles-allowed Now let's use this to make a secured request to the /secured/roles-allowed endpoint. Make sure you have the Quarkus server running using the `./mvnw compile quarkus:dev` command, and then run the following command, making sure to use your version of the generated JWT from the previous step: -[source,shell] +[source,bash] ---- curl -H "Authorization: Bearer eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUIiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJqZG9lLXVzaW5nLWp3dC1yYmFjIiwiYXVkIjoidXNpbmctand0LXJiYWMiLCJ1cG4iOiJqZG9lQHF1YXJrdXMuaW8iLCJiaXJ0aGRhdGUiOiIyMDAxLTA3LTEzIiwiYXV0aF90aW1lIjoxNTUxNjUyMDkxLCJpc3MiOiJodHRwczpcL1wvcXVhcmt1cy5pb1wvdXNpbmctand0LXJiYWMiLCJyb2xlTWFwcGluZ3MiOnsiZ3JvdXAyIjoiR3JvdXAyTWFwcGVkUm9sZSIsImdyb3VwMSI6Ikdyb3VwMU1hcHBlZFJvbGUifSwiZ3JvdXBzIjpbIkVjaG9lciIsIlRlc3RlciIsIlN1YnNjcmliZXIiLCJncm91cDIiXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiamRvZSIsImV4cCI6MTU1MTY1MjM5MSwiaWF0IjoxNTUxNjUyMDkxLCJqdGkiOiJhLTEyMyJ9.aPA4Rlc4kw7n_OZZRRk25xZydJy_J_3BRR8ryYLyHTO1o68_aNWWQCgpnAuOW64svPhPnLYYnQzK-l2vHX34B64JySyBD4y_vRObGmdwH_SEufBAWZV7mkG3Y4mTKT3_4EWNu4VH92IhdnkGI4GJB6yHAEzlQI6EdSOa4Nq8Gp4uPGqHsUZTJrA3uIW0TbNshFBm47-oVM3ZUrBz57JKtr0e9jv0HjPQWyvbzx1HuxZd6eA8ow8xzvooKXFxoSFCMnxotd3wagvYQ9ysBa89bgzL-lhjWtusuMFDUVYwFqADE7oOSOD4Vtclgq8svznBQ-YpfTHfb9QEcofMlpyjNA" http://127.0.0.1:8080/secured/roles-allowed; echo ---- @@ -438,7 +465,7 @@ curl -H "Authorization: Bearer eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUI [source,shell] ---- $ curl -H "Authorization: Bearer eyJraWQ..." http://127.0.0.1:8080/secured/roles-allowed; echo -hello + jdoe@quarkus.io, isHttps: false, authScheme: MP-JWT, hasJWT: true, birthdate: 2001-07-13 +hello + jdoe@quarkus.io, isHttps: false, authScheme: Bearer, hasJWT: true, birthdate: 2001-07-13 ---- Success! We now have: @@ -459,7 +486,7 @@ hold of the caller `JsonWebToken` interface by casting the `SecurityContext#getU The `JsonWebToken` interface defines methods for accessing claims in the underlying JWT. It provides accessors for common claims that are required by the {mp-jwt} specification as well as arbitrary claims that may exist in the JWT. -All the JWT claims can also be injected. 
Let's expand our `TokenSecuredResource` with another endpoint /secured/roles-allowed-admin which users the injected `birthdate` claim +All the JWT claims can also be injected. Let's expand our `TokenSecuredResource` with another endpoint /secured/roles-allowed-admin which uses the injected `birthdate` claim (as opposed to getting it from `JsonWebToken`): [source, java] @@ -542,7 +569,7 @@ public class TokenSecuredResource { Now generate the token again and run: -[source,shell] +[source,bash] ---- curl -H "Authorization: Bearer eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUIiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJqZG9lLXVzaW5nLWp3dC1yYmFjIiwiYXVkIjoidXNpbmctand0LXJiYWMiLCJ1cG4iOiJqZG9lQHF1YXJrdXMuaW8iLCJiaXJ0aGRhdGUiOiIyMDAxLTA3LTEzIiwiYXV0aF90aW1lIjoxNTUxNjUyMDkxLCJpc3MiOiJodHRwczpcL1wvcXVhcmt1cy5pb1wvdXNpbmctand0LXJiYWMiLCJyb2xlTWFwcGluZ3MiOnsiZ3JvdXAyIjoiR3JvdXAyTWFwcGVkUm9sZSIsImdyb3VwMSI6Ikdyb3VwMU1hcHBlZFJvbGUifSwiZ3JvdXBzIjpbIkVjaG9lciIsIlRlc3RlciIsIlN1YnNjcmliZXIiLCJncm91cDIiXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiamRvZSIsImV4cCI6MTU1MTY1MjM5MSwiaWF0IjoxNTUxNjUyMDkxLCJqdGkiOiJhLTEyMyJ9.aPA4Rlc4kw7n_OZZRRk25xZydJy_J_3BRR8ryYLyHTO1o68_aNWWQCgpnAuOW64svPhPnLYYnQzK-l2vHX34B64JySyBD4y_vRObGmdwH_SEufBAWZV7mkG3Y4mTKT3_4EWNu4VH92IhdnkGI4GJB6yHAEzlQI6EdSOa4Nq8Gp4uPGqHsUZTJrA3uIW0TbNshFBm47-oVM3ZUrBz57JKtr0e9jv0HjPQWyvbzx1HuxZd6eA8ow8xzvooKXFxoSFCMnxotd3wagvYQ9ysBa89bgzL-lhjWtusuMFDUVYwFqADE7oOSOD4Vtclgq8svznBQ-YpfTHfb9QEcofMlpyjNA" http://127.0.0.1:8080/secured/roles-allowed-admin; echo ---- @@ -550,29 +577,28 @@ curl -H "Authorization: Bearer eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUI [source,shell] ---- $ curl -H "Authorization: Bearer eyJraWQ..." http://127.0.0.1:8080/secured/roles-allowed-admin; echo -hello + jdoe@quarkus.io, isHttps: false, authScheme: MP-JWT, hasJWT: true, birthdate: 2001-07-13 +hello + jdoe@quarkus.io, isHttps: false, authScheme: Bearer, hasJWT: true, birthdate: 2001-07-13 ---- === Package and run the application -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file: +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file: .Runner jar Example -[source,shell] +[source,shell,subs=attributes+] ---- -Scotts-iMacPro:security-jwt-quickstart starksm$ ./mvnw clean package +$ ./mvnw clean package [INFO] Scanning for projects... ... -[INFO] [io.quarkus.creator.phase.runnerjar.RunnerJarPhase] Building jar: /Users/starksm/Dev/JBoss/Protean/starksm64-quarkus-quickstarts/security-jwt-quickstart/target/security-jwt-quickstart-runner.jar -Scotts-iMacPro:security-jwt-quickstart starksm$ java -jar target/security-jwt-quickstart-runner.jar -2019-03-28 14:27:48,839 INFO [io.quarkus] (main) Quarkus 0.12.0 started in 0.796s. Listening on: http://[::]:8080 -2019-03-28 14:27:48,841 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jsonb, security, smallrye-jwt] +$ java -jar target/quarkus-app/quarkus-run.jar +2019-03-28 14:27:48,839 INFO [io.quarkus] (main) Quarkus {quarkus-version} started in 0.796s. Listening on: http://[::]:8080 +2019-03-28 14:27:48,841 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jackson, security, smallrye-jwt] ---- You can also generate the native executable with `./mvnw clean package -Pnative`. .Native Executable Example [source,shell] ---- -Scotts-iMacPro:security-jwt-quickstart starksm$ ./mvnw clean package -Pnative +$ ./mvnw clean package -Pnative [INFO] Scanning for projects... ... 
[security-jwt-quickstart-runner:25602] universe: 493.17 ms @@ -590,9 +616,9 @@ Scotts-iMacPro:security-jwt-quickstart starksm$ ./mvnw clean package -Pnative [INFO] Finished at: 2019-03-28T14:30:56-07:00 [INFO] ------------------------------------------------------------------------ -Scotts-iMacPro:security-jwt-quickstart starksm$ ./target/security-jwt-quickstart-runner +$ ./target/security-jwt-quickstart-runner 2019-03-28 14:31:37,315 INFO [io.quarkus] (main) Quarkus 0.12.0 started in 0.006s. Listening on: http://[::]:8080 -2019-03-28 14:31:37,316 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jsonb, security, smallrye-jwt] +2019-03-28 14:31:37,316 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jackson, security, smallrye-jwt] ---- === Explore the Solution @@ -638,12 +664,14 @@ SmallRye JWT provides more properties which can be used to customize the token p [cols="> and learn how == How to check the errors in the logs == -Set `quarkus.log.category."io.quarkus.smallrye.jwt.runtime.auth.MpJwtValidator".level=TRACE` to see more details about the token verification or decryption errors. +Set `quarkus.log.category."io.quarkus.smallrye.jwt.runtime.auth.MpJwtValidator".level=TRACE` and `quarkus.log.category."io.quarkus.smallrye.jwt.runtime.auth.MpJwtValidator".min-level=TRACE` to see more details about the token verification or decryption errors. + +== Custom Factories + +`io.smallrye.jwt.auth.principal.DefaultJWTCallerPrincipalFactory` is used by default to parse and verify JWT tokens and convert them to `JsonWebToken` principals. +It uses `MP JWT` and `smallrye-jwt` properties listed in the `Configuration` section to verify and customize JWT tokens. + +If you need to provide your own factory, for example, to avoid verifying the tokens again which have already been verified by the firewall, then you can either use a `ServiceLoader` mechanism by providing a `META-INF/services/io.smallrye.jwt.auth.principal.JWTCallerPrincipalFactory` resource or simply have an `Alternative` CDI bean implementation like this one: + +[source,java] +---- +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import javax.annotation.Priority; +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.Alternative; +import org.jose4j.jwt.JwtClaims; +import org.jose4j.jwt.consumer.InvalidJwtException; +import io.smallrye.jwt.auth.principal.DefaultJWTCallerPrincipal; +import io.smallrye.jwt.auth.principal.JWTAuthContextInfo; +import io.smallrye.jwt.auth.principal.JWTCallerPrincipal; +import io.smallrye.jwt.auth.principal.JWTCallerPrincipalFactory; +import io.smallrye.jwt.auth.principal.ParseException; + +@ApplicationScoped +@Alternative +@Priority(1) +public class TestJWTCallerPrincipalFactory extends JWTCallerPrincipalFactory { + + @Override + public JWTCallerPrincipal parse(String token, JWTAuthContextInfo authContextInfo) throws ParseException { + try { + // Token has already been verified, parse the token claims only + String json = new String(Base64.getUrlDecoder().decode(token.split("\\.")[1]), StandardCharsets.UTF_8); + return new DefaultJWTCallerPrincipal(JwtClaims.parse(json)); + } catch (InvalidJwtException ex) { + throw new ParseException(ex.getMessage()); + } + } +} +---- [[generate-jwt-tokens]] == Generate JWT tokens with SmallRye JWT @@ -736,6 +810,19 @@ Finally both the confidentiality and integrity of the claims can be further enfo SmallRye JWT provides an API for securing the JWT claims using all of these options. 
+== Maven dependency + +[source,xml] +---- + + io.quarkus + quarkus-smallrye-jwt-build + +---- + +Note you can use Smallrye JWT Build API without having to create MP JWT endpoints. +It can also be excluded from `quarkus-smallrye-jwt` if MP JWT endpoints do not need to generate JWT tokens. + === Create JwtClaimsBuilder and set the claims The first step is to initialize a `JwtClaimsBuilder` using one of the options below and add some claims to it: @@ -832,7 +919,9 @@ String jwt = Jwt.claims("/tokenClaims.json").innerSign().encrypt(); === Fast JWT Generation If `smallrye.jwt.sign.key-location` or/and `smallrye.jwt.encrypt.key-location` properties are set then one can secure the existing claims (resources, maps, JsonObjects) with a single call: -``` + +[source,java] +---- // More compact than Jwt.claims("/claims.json").sign(); Jwt.sign("/claims.json"); @@ -841,20 +930,24 @@ Jwt.encrypt("/claims.json"); // More compact than Jwt.claims("/claims.json").innerSign().encrypt(); Jwt.signAndEncrypt("/claims.json"); -``` +---- As mentioned above, `iat`, `exp`, `jti` and `iss` claims will be added if needed. === SmallRye JWT Builder configuration -Smallrye JWT supports the following properties which can be used to customize the way claims are signed and encrypted: +SmallRye JWT supports the following properties which can be used to customize the way claims are signed and encrypted: [cols="> get() { return identity.checkPermission(new AuthPermission("{resource_name}")) - .thenCompose(granted -> { + .transform(granted -> { if (granted) { return CompletableFuture.completedFuture(doGetState()); } @@ -359,3 +359,4 @@ include::{generated-dir}/config/quarkus-keycloak-keycloak-policy-enforcer-config * https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services Documentation] * https://openid.net/connect/[OpenID Connect] * https://tools.ietf.org/html/rfc7519[JSON Web Token] +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-ldap.adoc b/docs/src/main/asciidoc/security-ldap.adoc index 1d0f76033f78c..095a587404d0d 100644 --- a/docs/src/main/asciidoc/security-ldap.adoc +++ b/docs/src/main/asciidoc/security-ldap.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Security with an LDAP Realm @@ -44,17 +44,18 @@ The solution is located in the `security-ldap-quickstart` {quickstarts-tree-url} First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-ldap-quickstart \ - -Dextensions="elytron-security-ldap, resteasy" + -Dextensions="elytron-security-ldap,resteasy" \ + -DnoExamples cd security-ldap-quickstart ---- This command generates a Maven project, importing the `elytron-security-ldap` extension -which is a https://docs.wildfly.org/19/WildFly_Elytron_Security.html#ldap-security-realm[`wildfly-elytron-realm-ldap`] adapter for Quarkus applications. +which is a `wildfly-elytron-realm-ldap` adapter for Quarkus applications. 
If you already have your Quarkus project configured, you can add the `elytron-security-ldap` extension to your project by running the following command in your project base directory: @@ -136,9 +137,7 @@ import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; import javax.ws.rs.core.SecurityContext; @Path("/api/users") @@ -147,7 +146,6 @@ public class UserResource { @GET @RolesAllowed("standardRole") @Path("/me") - @Produces(MediaType.APPLICATION_JSON) public String me(@Context SecurityContext securityContext) { return securityContext.getUserPrincipal().getName(); } @@ -173,6 +171,8 @@ quarkus.security.ldap.identity-mapping.attribute-mappings."0".filter=(member=uid quarkus.security.ldap.identity-mapping.attribute-mappings."0".filter-base-dn=ou=roles,ou=tool,o=YourCompany,c=DE ---- +`{0}` is substituted by the `uid`, whereas `{1}` will be substituted by the `dn` of the user entry. + The `elytron-security-ldap` extension requires a dir-context and an identity-mapping with at least one attribute-mapping to authenticate the user and its identity. == Testing the Application @@ -217,6 +217,7 @@ By providing the `adminUser:adminUserPassword` credentials, the extension authen The `adminUser` user is authorized to access to the protected resources. The user `adminUser` should be forbidden to access a resource protected with `@RolesAllowed("standardRole")` because it doesn't have this role. + [source,shell] ---- $ curl -i -X GET -u adminUser:adminUserPassword http://localhost:8080/api/users/me @@ -228,9 +229,10 @@ Forbidden% ---- Finally, using the user `standardUser` works and the security context contains the principal details (username for instance). + [source,shell] ---- -curl -i -X GET -u standardUser:standardUserPassword http://localhost:8080/api/users/me +$ curl -i -X GET -u standardUser:standardUserPassword http://localhost:8080/api/users/me HTTP/1.1 200 OK Content-Length: 4 Content-Type: text/plain;charset=UTF-8 @@ -242,3 +244,8 @@ user% == Configuration Reference include::{generated-dir}/config/quarkus-elytron-security-ldap.adoc[opts=optional, leveloffset=+1] + +== References + +* https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol[LDAP] +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-oauth2.adoc b/docs/src/main/asciidoc/security-oauth2.adoc index fdc66290231ed..eccfe5c01a36c 100644 --- a/docs/src/main/asciidoc/security-oauth2.adoc +++ b/docs/src/main/asciidoc/security-oauth2.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using OAuth2 RBAC @@ -35,14 +35,14 @@ It contains a very simple UI to use the JAX-RS resources created here, too. First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-oauth2-quickstart \ -DclassName="org.acme.security.oauth2.TokenSecuredResource" \ -Dpath="/secured" \ - -Dextensions="resteasy-jsonb, security-oauth2" + -Dextensions="resteasy,resteasy-jackson,security-oauth2" cd security-oauth2-quickstart ---- @@ -62,7 +62,6 @@ If you don't want to use the Maven plugin, you can just include the dependency i Open the `src/main/java/org/acme/security/oauth2/TokenSecuredResource.java` file and see the following content: -.Basic REST Endpoint [source,java] ---- package org.acme.security.oauth2; @@ -124,39 +123,34 @@ public class TokenSecuredResource { <3> Here we obtain the current request user/caller `Principal`. For an unsecured call this will be null, so we build the user name by checking `caller` against null. <4> The reply we build up makes use of the caller name, the `isSecure()` and `getAuthenticationScheme()` states of the request `SecurityContext`. -== Run the application -Now we are ready to run our application. Use: +=== Setting up application.properties -[source,shell] +You need to configure your application with the following minimal properties: + +[source, properties] ---- -./mvnw compile quarkus:dev +quarkus.oauth2.client-id=client_id +quarkus.oauth2.client-secret=secret +quarkus.oauth2.introspection-url=http://oauth-server/introspect ---- -and you should see output similar to: +You need to specify the introspection URL of your authentication server and the `client-id` / `client-secret` that your application will use to authenticate itself to the authentication server. + +The extension will then use this information to validate the token and recover the information associate with it. -.quarkus:dev Output -[source,shell] ----- -$ ./mvnw clean compile quarkus:dev -[INFO] Scanning for projects... -[INFO] -[INFO] ---------------------< org.acme:security-oauth2-quickstart >--------------------- -[INFO] Building security-oauth2-quickstart 1.0-SNAPSHOT -[INFO] --------------------------------[ jar ]--------------------------------- -... -[INFO] --- quarkus-maven-plugin:999-SNAPSHOT:dev (default-cli) @ security-oauth2-quickstart --- -Listening for transport dt_socket at address: 5005 -2019-07-16 09:58:09,753 INFO [io.qua.dep.QuarkusAugmentor] (main) Beginning quarkus augmentation -2019-07-16 09:58:10,884 INFO [io.qua.dep.QuarkusAugmentor] (main) Quarkus augmentation completed in 1131ms -2019-07-16 09:58:11,385 INFO [io.quarkus] (main) Quarkus 0.20.0 started in 1.813s. Listening on: http://[::]:8080 -2019-07-16 09:58:11,391 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jsonb, security, security-oauth2] +For all configuration properties, see the link:#config-reference[Configuration reference] section at the end of this guide. + +== Run the application + +Now we are ready to run our application. Use: +[source,bash] +---- +./mvnw quarkus:dev ---- Now that the REST endpoint is running, we can access it using a command line tool like curl: -.curl command for /secured/permit-all [source,shell] ---- $ curl http://127.0.0.1:8080/secured/permit-all; echo @@ -169,6 +163,8 @@ We have not provided any token in our request, so we would not expect that there * `isSecure` is false as https is not used * `authScheme` is null +=== Securing the endpoint + So now let's actually secure something. 
Take a look at the new endpoint method `helloRolesAllowed` in the following: [source,java] @@ -219,7 +215,6 @@ public class TokenSecuredResource { After you make this addition to your `TokenSecuredResource`, try `curl -v http://127.0.0.1:8080/secured/roles-allowed; echo` to attempt to access the new endpoint. Your output should be: -.curl command for /secured/roles-allowed [source,shell] ---- $ curl -v http://127.0.0.1:8080/secured/roles-allowed; echo @@ -243,34 +238,13 @@ Not authorized Excellent, we have not provided any OAuth2 token in the request, so we should not be able to access the endpoint, and we were not. Instead we received an HTTP 401 Unauthorized error. We need to obtain and pass in a valid OAuth2 token to access that endpoint. There are two steps to this, 1) configuring our {extension-name} extension with information on how to validate the token, and 2) generating a matching token with the appropriate claims. -== Configuring the {extension-name} Extension Security Information - -include::{generated-dir}/config/quarkus-elytron-security-oauth2.adoc[opts=optional, leveloffset=+1] - -=== Setting up application.properties - -For part A of step 1, create a `security-oauth2-quickstart/src/main/resources/application.properties` with the following content: - -.application.properties for TokenSecuredResource -[source, properties] ----- -quarkus.oauth2.client-id=client_id -quarkus.oauth2.client-secret=secret -quarkus.oauth2.introspection-url=http://oauth-server/introspect ----- - -You need to specify the introspection URL of your authentication server and the `client-id` / `client-secret` that your application will use to authenticate itself to the authentication server. - -The extension will then use this information to validate the token and recover the information associate with it. - - === Generating a token You need to obtain the token from a standard OAuth2 authentication server (https://www.keycloak.org/[Keycloak] for example) using the token endpoint. You can find below a curl example of such call for a `client_credential` flow: -[source,shell] +[source,bash] ---- curl -X POST "http://oauth-server/token?grant_type=client_credentials" \ -H "Accept: application/json" -H "Authorization: Basic Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ=" @@ -284,10 +258,9 @@ It should respond something like that... ---- -== Finally, Secured Access to /secured/roles-allowed +=== Finally, make a secured request to /secured/roles-allowed Now let's use this to make a secured request to the `/secured/roles-allowed` endpoint -.curl Command for /secured/roles-allowed With a token [source,shell] ---- $ curl -H "Authorization: Bearer 60acf56d-9daf-49ba-b3be-7a423d9c7288" http://127.0.0.1:8080/secured/roles-allowed; echo @@ -307,20 +280,20 @@ You can customize the name of the claim to use for the roles with the `quarkus.o == Package and run the application -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file: -.Runner jar Example -[source,shell] +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file: + +[source,shell,subs=attributes+] ---- $ ./mvnw clean package [INFO] Scanning for projects... ... -$ java -jar target/security-oauth2-quickstart-runner.jar -2019-03-28 14:27:48,839 INFO [io.quarkus] (main) Quarkus 0.20.0 started in 0.796s. 
Listening on: http://[::]:8080 -2019-03-28 14:27:48,841 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jsonb, security, security-oauth2] +$ java -jar target/quarkus-app/quarkus-run.jar +2019-03-28 14:27:48,839 INFO [io.quarkus] (main) Quarkus {quarkus-version} started in 0.796s. Listening on: http://[::]:8080 +2019-03-28 14:27:48,841 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jackson, security, security-oauth2] ---- You can also generate the native executable with `./mvnw clean package -Pnative`. -.Native Executable Example + [source,shell] ---- $ ./mvnw clean package -Pnative @@ -343,7 +316,7 @@ $ ./mvnw clean package -Pnative $ ./target/security-oauth2-quickstart-runner 2019-03-28 14:31:37,315 INFO [io.quarkus] (main) Quarkus 0.20.0 started in 0.006s. Listening on: http://[::]:8080 -2019-03-28 14:31:37,316 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jsonb, security, security-oauth2] +2019-03-28 14:31:37,316 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jackson, security, security-oauth2] ---- [[integration-testing]] @@ -465,3 +438,14 @@ class TokenSecuredResourceTest { ==== `@QuarkusTestResource` applies to all tests, not just `TokenSecuredResourceTest`. ==== + + +== References + +* https://tools.ietf.org/html/rfc6749[OAuth2] +* link:security[Quarkus Security] + +[[config-reference]] +== Configuration Reference + +include::{generated-dir}/config/quarkus-elytron-security-oauth2.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/security-openid-connect-client.adoc b/docs/src/main/asciidoc/security-openid-connect-client.adoc new file mode 100644 index 0000000000000..ae62a005c7bc5 --- /dev/null +++ b/docs/src/main/asciidoc/security-openid-connect-client.adoc @@ -0,0 +1,437 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Using OpenID Connect and OAuth2 Client and Filters to manage access tokens + +include::./attributes.adoc[] +:toc: + +This guide explains how to use: + + * `quarkus-oidc-client` and `quarkus-oidc-client-filter` extensions to acquire and refresh access tokens from OpenId Connect and OAuth 2.0 compliant Authorization Servers such as https://www.keycloak.org/about.html[Keycloak] + * `quarkus-oidc-token-propagation` extension to propagate the current bearer or authorization code flow access tokens + +The access tokens managed by these extensions can be used as HTTP Authorization Bearer tokens to access the remote services. + +== OidcClient + +`quarkus-oidc-client` extension provides a reactive `io.quarkus.oidc.client.OidcClient` which can be used to acquire and refresh tokens using SmallRye Mutiny `Uni` and `Vert.x WebClient`. + +`OidcClient` is initialized at the build time with the IDP token endpoint URL which can be auto-discovered or manually configured and uses this endpoint to acquire access tokens using `client_credentials` or `password` token grants and refresh the tokens using `refresh_token` grant. 
+
+Here is how `OidcClient` can be configured to use the `client_credentials` grant:
+
+[source,properties]
+----
+quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus2/
+quarkus.oidc-client.client-id=quarkus-app
+quarkus.oidc-client.credentials.secret=secret
+----
+
+Here is how `OidcClient` can be configured to use the `password` grant:
+
+[source,properties]
+----
+quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus2/
+quarkus.oidc-client.client-id=quarkus-app
+quarkus.oidc-client.credentials.secret=secret
+quarkus.oidc-client.grant.type=password
+quarkus.oidc-client.grant-options.password.username=alice
+quarkus.oidc-client.grant-options.password.password=alice
+----
+
+In both cases `OidcClient` will auto-discover the token endpoint URL and use it to acquire the tokens.
+
+=== Use OidcClient directly
+
+One can use `OidcClient` directly as follows:
+
+[source,java]
+----
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+import io.quarkus.oidc.client.OidcClient;
+import io.quarkus.oidc.client.Tokens;
+
+@Path("/service")
+public class OidcClientResource {
+
+    @Inject
+    OidcClient client;
+
+    volatile Tokens currentTokens;
+
+    @PostConstruct
+    public void init() {
+        currentTokens = client.getTokens().await().indefinitely();
+    }
+
+    @GET
+    public String getResponse() {
+        Tokens tokens = currentTokens;
+        if (tokens.isAccessTokenExpired()) {
+            tokens = client.refreshTokens(tokens.getRefreshToken()).await().indefinitely();
+            currentTokens = tokens;
+        }
+        // use tokens.getAccessToken() to configure MP RestClient Authorization header/etc
+        return tokens.getAccessToken();
+    }
+}
+----
+
+=== Use OidcClient in MicroProfile RestClient client filter
+
+`quarkus-oidc-client-filter` extension provides `io.quarkus.oidc.client.filter.OidcClientRequestFilter` JAX-RS ClientRequestFilter which uses `OidcClient` to acquire the access token, refresh it if needed, and set it as an HTTP `Authorization` `Bearer` scheme value.
+
+By default, this filter will get `OidcClient` to acquire the first pair of access and refresh tokens at its initialization time. If the access tokens are short-lived and refresh tokens are not available then the token acquisition should be delayed with `quarkus.oidc-client.early-tokens-acquisition=false`.
+
+You can selectively register `OidcClientRequestFilter` by using either `io.quarkus.oidc.client.filter.OidcClientFilter` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider` annotations:
+
+[source,java]
+----
+import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
+import io.quarkus.oidc.client.filter.OidcClientFilter;
+
+@RegisterRestClient
+@OidcClientFilter
+@Path("/")
+public interface ProtectedResourceService {
+
+    @GET
+    String getUserName();
+}
+----
+
+or
+
+[source,java]
+----
+import org.eclipse.microprofile.rest.client.annotation.RegisterProvider;
+import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
+import io.quarkus.oidc.client.filter.OidcClientRequestFilter;
+
+@RegisterRestClient
+@RegisterProvider(OidcClientRequestFilter.class)
+@Path("/")
+public interface ProtectedResourceService {
+
+    @GET
+    String getUserName();
+}
+----
+
+Alternatively, `OidcClientRequestFilter` can be registered automatically with all MP Rest or JAX-RS clients if the `quarkus.oidc-client-filter.register-filter=true` property is set.
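+
+For illustration, here is a minimal sketch of how the annotated `ProtectedResourceService` client might then be used from a regular JAX-RS resource; the `FrontendResource` class name and the `/frontend` path are only placeholders. The filter acquires (and refreshes) the access token and adds the `Authorization: Bearer` header before the outgoing call is made:
+
+[source,java]
+----
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+import org.eclipse.microprofile.rest.client.inject.RestClient;
+
+@Path("/frontend")
+public class FrontendResource {
+
+    @Inject
+    @RestClient
+    ProtectedResourceService protectedResourceService;
+
+    @GET
+    public String userName() {
+        // OidcClientRequestFilter sets the Bearer access token on this outgoing request
+        return protectedResourceService.getUserName();
+    }
+}
+----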
+ +=== Use injected Tokens + +If you prefer you can use your own custom filter and inject `Tokens`: + +[source,java] +---- +@Provider +@Priority(Priorities.AUTHENTICATION) +@RequestScoped +public class OidcClientRequestCustomFilter implements ClientRequestFilter { + + @Inject + Tokens tokens; + + @Override + public void filter(ClientRequestContext requestContext) throws IOException { + requestContext.getHeaders().add(HttpHeaders.AUTHORIZATION, "Bearer " + tokens.getAccessToken()); + } +} +---- + +The `Tokens` producer will acquire and refresh the tokens, and the custom filter will decide how and when to use the token. + +See also the previous section about delaying the token acquisition in some cases. + +=== Refreshing Access Tokens + +Both `OidcClientRequestFilter` and `Tokens` producer will refresh the current expired access token if the refresh token is available. +Additionally, `quarkus.oidc-client.refresh-token-time-skew` property can be used for a preemptive access token refreshment to avoid sending nearly expired access tokens which may cause HTTP 401 errors. For example if this property is set to `3S` and the access token will expire in less than 3 seconds then this token will be auto-refreshed. + +If the access token needs to be refreshed but no refresh token is available then an attempt will be made to acquire a new token using the configured grant such as `client_credentials`. + +Please note that some OpenId Connect Providers will not return a refresh token in a `client_credentials` grant response. For example, starting from Keycloak 12 a refresh token will not be returned by default for `client_credentials`. The providers may also restrict a number of times a refresh token can be used. + +=== OidcClients + +`io.quarkus.oidc.client.OidcClients` is a container of `OidcClient`s - it includes a default `OidcClient` (which can also be injected directly as described above) and named clients which can be configured like this: + +[source,properties] +---- +quarkus.oidc-client.client-enabled=false + +quarkus.oidc-client.jwt-secret.auth-server-url=${keycloak.url}/realms/quarkus2/ +quarkus.oidc-client.jwt-secret.client-id=quarkus-app +quarkus.oidc-client.jwt-secret.credentials.jwt.secret=AyM1SysPpbyDfgZld3umj1qzKObwVMkoqQ-EstJQLr_T-1qS0gZH75aKtMN3Yj0iPS4hcgUuTwjAzZr1Z9CAow +---- + +Note in this case the default client is disabled with a `client-enabled=false` property. 
The `jwt-secret` client can be accessed like this: + +[source,java] +---- +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import io.quarkus.oidc.client.OidcClient; +import io.quarkus.oidc.client.OidcClients; + +@Path("/clients") +public class OidcClientResource { + + @Inject + OidcClients clients; + + @GET + public String getResponse() { + OidcClient client = clients.getClient("jwt-secret"); + // use this client to get the token + } +} +---- + +[NOTE] +==== +If you also use link:security-openid-connect-multitenancy[OIDC multitenancy] and each OIDC tenant has its own associated `OidcClient` then you can use a Vert.x `RoutingContext` `tenantId` attribute, for example: + +[source,java] +---- +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import io.quarkus.oidc.client.OidcClient; +import io.quarkus.oidc.client.OidcClients; +import io.vertx.ext.web.RoutingContext; + +@Path("/clients") +public class OidcClientResource { + + @Inject + OidcClients clients; + @Inject + RoutingContext context; + + @GET + public String getResponse() { + String tenantId = context.get("tenantId"); + // named OIDC tenant and client configurations use the same key: + OidcClient client = clients.getClient(tenantId); + // use this client to get the token + } +} +---- +==== + +If you need you can also create new `OidcClient` programmatically like this: + +[source,java] +---- +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import io.quarkus.oidc.client.OidcClient; +import io.quarkus.oidc.client.OidcClients; +import io.quarkus.oidc.client.OidcClientConfig; + +import io.smallrye.mutiny.Uni; + +@Path("/clients") +public class OidcClientResource { + + @Inject + OidcClients clients; + + @GET + public String getResponse() { + OidcClientConfig cfg = new OidcClientConfig(); + cfg.setId("myclient"); + cfg.setAuthServerUrl("http://localhost:8081/auth/realms/quarkus/"); + cfg.setClientId("quarkus"); + cfg.getCredentials().setSecret("secret"); + Uni client = clients.newClient(config); + // use this client to get the token + } +} +---- + +[[oidc-client-authentication]] +=== OidcClient Authentication + +`OidcClient` has to authenticate to the OpenId Connect Provider for the `client_credentials` and other grant requests to succeed. 
+All the https://openid.net/specs/openid-connect-core-1_0.html#ClientAuthentication[OIDC Client Authentication] options are supported, for example: + +`client_secret_basic`: + +[source,properties] +---- +quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus/ +quarkus.oidc-client.client-id=quarkus-app +quarkus.oidc-client.credentials.secret=mysecret +---- + +`client_secret_post`: + +[source,properties] +---- +quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus/ +quarkus.oidc-client.client-id=quarkus-app +quarkus.oidc-client.credentials.client-secret.value=mysecret +quarkus.oidc-client.credentials.client-secret.method=post +---- + +`client_secret_jwt`: + +[source,properties] +---- +quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus/ +quarkus.oidc-client.client-id=quarkus-app +quarkus.oidc-client.credentials.jwt.secret=AyM1SysPpbyDfgZld3umj1qzKObwVMkoqQ-EstJQLr_T-1qS0gZH75aKtMN3Yj0iPS4hcgUuTwjAzZr1Z9CAow +---- + +`private_key_jwt` with the PEM key file: + +[source,properties] +---- +quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus/ +quarkus.oidc-client.client-id=quarkus-app +quarkus.oidc-client.credentials.jwt.key-file=privateKey.pem +---- + +`private_key_jwt` with the key store file: + +[source,properties] +---- +quarkus.oidc-client.auth-server-url=${keycloak.url}/realms/quarkus/ +quarkus.oidc-client.client-id=quarkus-app +quarkus.oidc-client.credentials.jwt.key-store-file=keystore.jks +quarkus.oidc-client.credentials.jwt.key-store-password=mypassword +quarkus.oidc-client.credentials.jwt.key-password=mykeypassword +quarkus.oidc-client.credentials.jwt.key-id=mykey +---- + +Using `private_key_jwt` or `private_key_jwt` authentication methods ensures that no client secret goes over the wire. + +[[token-propagation]] +== Token Propagation in MicroProfile RestClient client filter + +`quarkus-oidc-token-propagation` extension provide `io.quarkus.oidc.token.propagation.AccessTokenRequestFilter` and `io.quarkus.oidc.token.propagation.JsonWebTokenRequestFilter` JAX-RS ClientRequestFilters which propagates the current link:security-openid-connect[Bearer] or link:security-openid-connect-web-authentication[Authorization Code Flow] access token as an HTTP `Authorization` `Bearer` scheme value. + +=== AccessTokenRequestFilter + +`AccessTokenRequestFilter` treats all tokens as Strings and as such it can work with both JWT and opaque tokens. + +When you need to propagate the current Authorization Code Flow access token then `AccessTokenRequestFilter` will be the best option as such tokens do not need to be exchanged or otherwise re-enhanced. Authorization Code Flow access tokens may be also be opaque/binary tokens. 
+ +You can selectively register `AccessTokenRequestFilter` by using either `io.quarkus.oidc.token.propagation.AccessToken` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider`, for example: + +[source,java] +---- +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import io.quarkus.oidc.token.propagation.AccessToken; + +@RegisterRestClient +@AccessToken +@Path("/") +public interface ProtectedResourceService { + + @GET + String getUserName(); +} +---- +or + +[source,java] +---- +import org.eclipse.microprofile.rest.client.annotation.RegisterProvider; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import io.quarkus.oidc.token.propagation.AccessTokenRequestFilter; + +@RegisterRestClient +@RegisterProvider(AccessTokenRequestFilter.class) +@Path("/") +public interface ProtectedResourceService { + + @GET + String getUserName(); +} +---- + +Alternatively, `AccessTokenRequestFilter` can be registered automatically with all MP Rest or JAX-RS clients if `quarkus.oidc-token-propagation.register-filter` property is set to `true` and `quarkus.oidc-token-propagation.json-web-token` property is set to `false` (which is a default value). + +This filter will be additionally enhanced in the future to support exchanging the access tokens before propagating them. + +=== JsonWebTokenRequestFilter + +Using `JsonWebTokenRequestFilter` is recommended if you work with Bearer JWT tokens where these tokens can have their claims such as `issuer` and `audience` modified and the updated tokens secured (for example, re-signed) again. It expects an injected `org.eclipse.microprofile.jwt.JsonWebToken` and therefore will not work with the opaque tokens. + +Direct end to end Bearer token propagation should be avoided if possible. For example, `Client -> Service A -> Service B` where `Service B` receives a token sent by `Client` to `Service A`. In such cases `Service B` will not be able to distinguish if the token came from `Service A` or from `Client` directly. For `Service B` to verify the token came from `Service A` it should be able to assert a new issuer and audience claims. + +`JsonWebTokenRequestFilter` makes it easy for `Service A` implemementations to update the injected `org.eclipse.microprofile.jwt.JsonWebToken` with the new `issuer` and `audience` claim values and secure the updated token again with a new signature. The only difficult step is to ensure `Service A` has a signing key - it should be provisioned from a secure file system or from the remote secure storage such as Vault. 
+ +You can selectively register `JsonWebTokenRequestFilter` by using either `io.quarkus.oidc.token.propagation.JsonWebToken` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider`, for example: + +[source,java] +---- +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import io.quarkus.oidc.token.propagation.JsonWebToken; + +@RegisterRestClient +@AccessToken +@Path("/") +public interface ProtectedResourceService { + + @GET + String getUserName(); +} +---- +or + +[source,java] +---- +import org.eclipse.microprofile.rest.client.annotation.RegisterProvider; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import io.quarkus.oidc.token.propagation.JsonWebTokenRequestFilter; + +@RegisterRestClient +@RegisterProvider(JsonWebTokenTokenRequestFilter.class) +@Path("/") +public interface ProtectedResourceService { + + @GET + String getUserName(); +} +---- + +Alternatively, `JsonWebTokenRequestFilter` can be registered automatically with all MP Rest or JAX-RS clients if both `quarkus.oidc-token-propagation.register-filter` and ``quarkus.oidc-token-propagation.json-web-token` properties are set to `true`. + +If this filter has to update the inject token and secure it with a new signature again then you can configure it like this: + +[source,properties] +---- +quarkus.oidc-token-propagation.secure-json-web-token=true +smallrye.jwt.sign.key.location=/privateKey.pem +# Set a new issuer +smallrye.jwt.new-token.issuer=http://frontend-resource +# Set a new audience +smallrye.jwt.new-token.audience=http://downstream-resource +# Override the existing token issuer and audience claims if they are already set +smallrye.jwt.new-token.override-matching-claims=true +---- + + +This filter will be additionally enhanced in the future to support exchanging the access tokens before propagating them. + +== References + +* link:security[Quarkus Security] +* link:security-openid-connect[Quarkus - Using OpenID Connect to Protect Service Applications using Bearer Token Authorization] +* link:security-openid-connect-web-authentication[Quarkus - Using OpenID Connect to Protect Web Applications using Authorization Code Flow] diff --git a/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc b/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc index abb30ee4de6ce..7678364c6d6f3 100644 --- a/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using OpenID Connect Multi-Tenancy @@ -42,19 +42,20 @@ However, you can go right to the completed example. Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. -The solution is located in the `security-openid-connect-multi-tenancy` {quickstarts-tree-url}/security-openid-connect-multi-tenancy[directory]. +The solution is located in the `security-openid-connect-multi-tenancy-quickstart` {quickstarts-tree-url}/security-openid-connect-multi-tenancy-quickstart[directory]. == Creating the Maven Project First, we need a new project. 
Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ - -DprojectArtifactId=security-openid-connect-multi-tenancy \ - -Dextensions="oidc, resteasy-jsonb" -cd security-openid-connect-multi-tenancy + -DprojectArtifactId=security-openid-connect-multi-tenancy-quickstart \ + -Dextensions="oidc,resteasy-jackson" \ + -DnoExamples +cd security-openid-connect-multi-tenancy-quickstart ---- If you already have your Quarkus project configured, you can add the `oidc` extension @@ -151,6 +152,24 @@ public class CustomTenantResolver implements TenantResolver { From the implementation above, tenants are resolved from the request path so that in case no tenant could be inferred, `null` is returned to indicate that the default tenant configuration should be used. +[NOTE] +=== +If you also use link:hibernate-orm#multitenancy[Hibernate ORM multitenancy] and both OIDC and Hibernate ORM tenant IDs are the same and must be extracted from the Vert.x `RoutingContext` then you can pass the tenant id from the OIDC Tenant Resolver to the Hibernate ORM Tenant Resolver as a `RoutingContext` attribute, for example: + +[source,java] +---- +public class CustomTenantResolver implements TenantResolver { + + @Override + public String resolve(RoutingContext context) { + String tenantId = extractTenantId(context); + context.put("tenantId", tenantId); + return tenantId; + } +} +---- +=== + == Configuring the application [source,properties] @@ -210,8 +229,8 @@ Log in as the `admin` user to access the Keycloak Administration Console. Userna Now, follow the steps below to import the realms for the two tenants: -* Import the {quickstarts-tree-url}/security-openid-connect-multi-tenancy/config/default-tenant-realm.json[default-tenant-realm.json] to create the default realm -* Import the {quickstarts-tree-url}/security-openid-connect-multi-tenancy/config/tenant-a-realm.json[tenant-a-realm.json] to create the realm for the tenant `tenant-a`. +* Import the {quickstarts-tree-url}/security-openid-connect-multi-tenancy-quickstart/config/default-tenant-realm.json[default-tenant-realm.json] to create the default realm +* Import the {quickstarts-tree-url}/security-openid-connect-multi-tenancy-quickstart/config/tenant-a-realm.json[tenant-a-realm.json] to create the realm for the tenant `tenant-a`. For more details, see the Keycloak documentation about how to https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[create a new realm]. @@ -236,7 +255,7 @@ Then run it: [source,bash] ---- -java -jar ./target/security-openid-connect-multi-tenancy-quickstart-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- === Running in Native Mode @@ -342,6 +361,56 @@ public class CustomTenantConfigResolver implements TenantConfigResolver { The `OidcTenantConfig` returned from this method is the same used to parse the `oidc` namespace configuration from the `application.properties`. You can populate it using any of the settings supported by the `quarkus-oidc` extension. 
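+
+As a rough illustration of that last point, a resolver can build the tenant configuration in code. The tenant id, URL, client id and secret below are placeholders, and the non-reactive `resolve(RoutingContext)` signature is assumed:
+
+[source,java]
+----
+import javax.enterprise.context.ApplicationScoped;
+
+import io.quarkus.oidc.OidcTenantConfig;
+import io.quarkus.oidc.TenantConfigResolver;
+import io.vertx.ext.web.RoutingContext;
+
+@ApplicationScoped
+public class CustomTenantConfigResolver implements TenantConfigResolver {
+
+    @Override
+    public OidcTenantConfig resolve(RoutingContext context) {
+        if (!context.request().path().startsWith("/tenant-b")) {
+            // fall back to the default tenant configuration
+            return null;
+        }
+        OidcTenantConfig config = new OidcTenantConfig();
+        config.setTenantId("tenant-b");
+        config.setAuthServerUrl("http://localhost:8180/auth/realms/tenant-b");
+        config.setClientId("multi-tenant-client");
+        config.getCredentials().setSecret("secret");
+        return config;
+    }
+}
+----
+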
+== Tenant Resolution for OIDC 'web-app' applications + +Several options are available for selecting the tenant configuration which should be used to secure the current HTTP request for both `service` and `web-app` OIDC applications, such as: + +- Check URL paths, for example, a `tenant-service` configuration has to be used for the "/service" paths, while a `tenant-manage` configuration - for the "/management" paths +- Check HTTP headers, for example, with a URL path always being '/service', a header such as "Realm: service" or "Realm: management" can help selecting between the `tenant-service` and `tenant-manage` configurations +- Check URL query parameters - it can work similarly to the way the headers are used to select the tenant configuration + +All these options can be easily implemented with the custom `TenantResolver` and `TenantConfigResolver` implementations for the OIDC `service` applications. + +However, due to an HTTP redirect required to complete the code authentication flow for the OIDC `web-app` applications, a custom HTTP cookie may be needed to select the same tenant configuration before and after this redirect request because: + +- URL path may not be the same after the redirect request if a single redirect URL has been registered in the OIDC Provider - the original request path can be restored but after the the tenant configuration is resolved +- HTTP headers used during the original request are not available after the redirect +- Custom URL query parameters are restored after the redirect but after the tenant configuration is resolved + +One option to ensure the information for resolving the tenant configurations for `web-app` applications is available before and after the redirect is to use a cookie, for example: + +[source,java] +---- +package org.acme.quickstart.oidc; + +import java.util.List; + +import javax.enterprise.context.ApplicationScoped; + +import io.quarkus.oidc.TenantResolver; +import io.vertx.core.http.Cookie; +import io.vertx.ext.web.RoutingContext; + +@ApplicationScoped +public class CustomTenantResolver implements TenantResolver { + + @Override + public String resolve(RoutingContext context) { + List tenantIdQuery = context.queryParam("tenantId"); + if (!tenantIdQuery.isEmpty()) { + String tenantId = tenantIdQuery.get(0); + context.addCookie(Cookie.cookie("tenant", tenantId)); + return tenantId; + } else if (context.cookieMap().containsKey("tenant")) { + return context.getCookie("tenant").getValue(); + } + + return null; + } +} +---- + +[[disable-tenant]] == Disabling Tenant Configurations Custom `TenantResolver` and `TenantConfigResolver` implementations may return `null` if no tenant can be inferred from the current request and a fallback to the default tenant configuration is required. 
@@ -360,3 +429,4 @@ include::{generated-dir}/config/quarkus-oidc.adoc[opts=optional] * https://openid.net/connect/[OpenID Connect] * https://tools.ietf.org/html/rfc7519[JSON Web Token] * https://developers.google.com/identity/protocols/OpenIDConnect[Google OpenID Connect] +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-openid-connect-web-authentication.adoc b/docs/src/main/asciidoc/security-openid-connect-web-authentication.adoc index 6e470c17605cb..9ee4523d5e681 100644 --- a/docs/src/main/asciidoc/security-openid-connect-web-authentication.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-web-authentication.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using OpenID Connect to Protect Web Applications using Authorization Code Flow. @@ -48,12 +48,13 @@ The solution is located in the `security-openid-connect-web-authentication-quick First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-openid-connect-web-authentication-quickstart \ - -Dextensions="oidc" + -Dextensions="resteasy,oidc" \ + -DnoExamples cd security-openid-connect-web-authentication-quickstart ---- @@ -75,6 +76,82 @@ This will add the following to your `pom.xml`: ---- +== Writing the application + +Let's write a simple JAX-RS resource which has all the tokens returned in the authorization code grant response injected: + +[source,java] +---- +package org.acme.security.openid.connect.web.authentication; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import org.eclipse.microprofile.jwt.JsonWebToken; + +import io.quarkus.oidc.IdToken; +import io.quarkus.oidc.RefreshToken; + +@Path("/tokens") +public class TokenResource { + + /** + * Injection point for the ID Token issued by the OpenID Connect Provider + */ + @Inject + @IdToken + JsonWebToken idToken; + + /** + * Injection point for the Access Token issued by the OpenID Connect Provider + */ + @Inject + JsonWebToken accessToken; + + /** + * Injection point for the Refresh Token issued by the OpenID Connect Provider + */ + @Inject + RefreshToken refreshToken; + + /** + * Returns the tokens available to the application. This endpoint exists only for demonstration purposes, you should not + * expose these tokens in a real application. + * + * @return a map containing the tokens available to the application + */ + @GET + public String getTokens() { + StringBuilder response = new StringBuilder().append("") + .append("") + .append("
      "); + + Object userName = this.idToken.getClaim("preferred_username"); + + if (userName != null) { + response.append("
    • username: ").append(userName.toString()).append("
    • "); + } + + Object scopes = this.accessToken.getClaim("scope"); + + if (scopes != null) { + response.append("
    • scopes: ").append(scopes.toString()).append("
    • "); + } + + response.append("
    • refresh_token: ").append(refreshToken.getToken() != null).append("
    • "); + + return response.append("
    ").append("").append("").toString(); + } +} +---- + +This endpoint has ID, access and refresh tokens injected. It returns a `preferred_username` claim from the ID token, a `scope` claim from the access token and also a refresh token availability status. + +Note that you do not have to inject the tokens - it is only required if the endpoint needs to use the ID token to interact with the currently authenticated user or use the access token to access a downstream service on behalf of this user. + +Please see <> section below for more information. + == Configuring the application The OpenID Connect extension allows you to define the configuration using the `application.properties` file which should be located at the `src/main/resources` directory. @@ -97,7 +174,11 @@ The `quarkus.oidc.client-id` property references the `client_id` issued by the O The `quarkus.oidc.application-type` property is set to `web-app` in order to tell Quarkus that you want to enable the OpenID Connect Authorization Code Flow, so that your users are redirected to the OpenID Connect Provider to authenticate. For last, the `quarkus.http.auth.permission.authenticated` permission is set to tell Quarkus about the paths you want to protect. In this case, -all paths are being protected by a policy that ensures that only `authenticated` users are allowed to access. For more details check link:security[Security Guide]. +all paths are being protected by a policy that ensures that only `authenticated` users are allowed to access. For more details check link:security-authorization[Security Authorization Guide]. + +=== Configuring CORS + +If you plan to consume this application from another application running on a different domain, you will need to configure CORS (Cross-Origin Resource Sharing). Please read the link:http-reference#cors-filter[HTTP CORS documentation] for more details. == Starting and Configuring the Keycloak Server @@ -135,7 +216,7 @@ Then run it: [source,bash] ---- -java -jar ./target/security-openid-connect-web-authentication-quickstart-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- === Running in Native Mode @@ -187,6 +268,31 @@ In such cases a `quarkus.oidc.authentication.redirect-path` property has to be s If `quarkus.oidc.authentication.redirect-path` is set but the original request URL has to be restored after the user has been redirected back to a callback URL such as `http://localhost:8080/service/callback` then a `quarkus.oidc.authentication.restore-path-after-redirect` property has to be set to `true` which will restore the request URL such as `http://localhost:8080/service/1`, etc. +[[oidc-cookies]] +== Dealing with Cookies + +The OIDC adapter uses cookies to keep the session, code flow and post logout state. + +`quarkus.oidc.authentication.cookie-path` property is used to ensure the cookies are visible especially when you access the protected resources with overlapping or different roots, for example: + +* `/index.html` and `/web-app/service` +* `/web-app/service1` and `/web-app/service2` +* `/web-app1/service` and `/web-app2/service` + +`quarkus.oidc.authentication.cookie-path` is set to `/` by default but can be narrowed to the more specific root path such as `/web-app`. + +You can also set a `quarkus.oidc.authentication.cookie-path-header` property if the cookie path needs to be set dynamically. +For example, setting `quarkus.oidc.authentication.cookie-path-header=X-Forwarded-Prefix` means that the value of HTTP `X-Forwarded-Prefix` header will be used to set a cookie path. 
+ +If `quarkus.oidc.authentication.cookie-path-header` is set but no configured HTTP header is available in the current request then the `quarkus.oidc.authentication.cookie-path` will be checked. + +If your application is deployed across multiple domains, make sure to set a `quarkus.oidc.authentication.cookie-domain` property for the session cookie be visible to all protected Quarkus services, for example, if you have 2 services deployed at: + +* https://whatever.wherever.company.net/ +* https://another.address.company.net/ + +then the `quarkus.oidc.authentication.cookie-domain` property must be set to `company.net`. + == Logout By default the logout is based on the expiration time of the ID Token issued by the OpenID Connect Provider. When the ID Token expires, the current user session at the Quarkus endpoint is invalidated and the user is redirected to the OpenID Connect Provider again to authenticate. If the session at the OpenID Connect Provider is still active, users are automatically re-authenticated without having to provide their credentials again. @@ -206,9 +312,36 @@ If the `quarkus.oidc.logout.post-logout-path` is set then a `q_post_logout` cook Note that a cookie name will vary when using link:security-openid-connect-multitenancy[OpenID Connect Multi-Tenancy]. For example, it will be named `q_post_logout_tenant_1` for a tenant with a `tenant_1` id, etc. +Here is an example of how to configure an RP initiated logout flow: + +[source,properties] +---- +quarkus.oidc.auth-server-url=http://localhost:8180/auth/realms/quarkus +quarkus.oidc.client-id=frontend +quarkus.oidc.application-type=web-app + +quarkus.oidc.tenant-logout.logout.path=/logout +quarkus.oidc.tenant-logout.logout.post-logout-path=/postlogout + +# Only the authenticated users can initiate a logout: +quarkus.http.auth.permission.authenticated.paths=/logout +quarkus.http.auth.permission.authenticated.policy=authenticated + +# Logged out users should be returned to the /welcome.html site which will offer an option to re-login: +quarkus.http.auth.permission.authenticated.paths=/welcome.html +quarkus.http.auth.permission.authenticated.policy=permit +---- + +You may also need to set `quarkus.oidc.authentication.cookie-path` to a path value common to all of the application resources which is `/` in this example. +See <> for more information. + +[[access_id_and_access_tokens]] == Accessing ID and Access Tokens -ID Token is always a JWT token. One can access ID Token claims by injecting `JsonWebToken` with an `IdToken` qualifier: +OIDC Code Authentication Mechanism acquires three tokens during the authorization code flow: https://openid.net/specs/openid-connect-core-1_0.html#IDToken[IDToken], Access Token and Refresh Token. + +ID Token is always a JWT token and is used to represent a user authentication with the JWT claims. +One can access ID Token claims by injecting `JsonWebToken` with an `IdToken` qualifier: [source, java] ---- @@ -268,10 +401,19 @@ Note that `AccessTokenCredential` will have to be used if the Access Token issue Injection of the `JsonWebToken` and `AccessTokenCredential` is supported in both `@RequestScoped` and `@ApplicationScoped` contexts. +RefreshToken is only used to refresh the current ID and access tokens as part of its link:#session_management[session management] process. + +[[user-info]] == User Info -Set `quarkus.oidc.user-info-required=true` if a UserInfo JSON object from the OIDC userinfo endpoint has to be requested. 
-This will make an `io.quarkus.oidc.UserInfo` (which is a simple `javax.json.JsonObject` wrapper) object accessible as a SecurityIdentity `userinfo` attribute. +If IdToken does not provide enough information about the currently authenticated user then you can set a `quarkus.oidc.user-info-required=true` property for a https://openid.net/specs/openid-connect-core-1_0.html#UserInfo[UserInfo] JSON object from the OIDC userinfo endpoint to be requested. + +A request will be sent to the OpenId Provider UserInfo endpoint using the access token returned with the authorization code grant response and an `io.quarkus.oidc.UserInfo` (a simple `javax.json.JsonObject` wrapper) object will be created. `io.quarkus.oidc.UserInfo` can be either injected or accessed as a SecurityIdentity `userinfo` attribute. + +[[config-metadata]] +== Configuration Metadata + +The discovered link:https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata[OpenId Connect Configuration Metadata] is represented by `io.quarkus.oidc.OidcConfigurationMetadata` and can be either injected or accessed as a `SecurityIdentity` `configuration-metadata` attribute. == Token Claims And SecurityIdentity Roles @@ -281,32 +423,129 @@ Note if you use Keycloak then you should set a Microprofile JWT client scope for If only the access token contains the roles and this access token is not meant to be propagated to the downstream endpoints then set `quarkus.oidc.roles.source=accesstoken`. -If UserInfo is the source of the roles then set `quarkus.oidc.user-info-required=true` and `quarkus.oidc.roles.source=userinfo`, and if needed, `quarkus.oidc.roles.role-claim-path`. +If UserInfo is the source of the roles then set `quarkus.oidc.authentication.user-info-required=true` and `quarkus.oidc.roles.source=userinfo`, and if needed, `quarkus.oidc.roles.role-claim-path`. + +Additionally a custom `SecurityIdentityAugmentor` can also be used to add the roles as documented link:security#security-identity-customization[here]. + +[[session-management]] +== Session Management + +If you have a link:security-openid-connect#single-page-applications[Single Page Application for Service Applications] where your OpenId Connect Provider script such as `keycloak.js` is managing an authoriization code flow then that script will also control the SPA authentication session lifespan. + +If your work with a Quarkus OIDC `web-app` application then it is Quarkus OIDC Code Authentication mechanism which is managing the user session lifespan. + +The session age is calculated by adding the lifespan value of the current IDToken and the values of the `quarkus.oidc.authentication.session-age-extension` and `quarkus.oidc.token.lifespan-grace` properties. Of the last two properties only `quarkus.oidc.authentication.session-age-extension` should be used to significantly extend the session lifespan if required since `quarkus.oidc.token.lifespan-grace` is only meant for taking some small clock skews into consideration. + +When the currently authenticated user returns to the protected Quarkus endpoint and the ID token associated with the session cookie has expired then, by default, the user will be auto-redirected to the OIDC Authorization endpoint to re-authenticate. Most likely the OIDC provider will challenge the user again though not necessarily if the session between the user and this OIDC provider is still active which may happen if it is configured to last longer than the ID token. 
+ +If the `quarkus.oidc.token.refresh-expired` then the expired ID token (as well as the access token) will be refreshed using the refresh token returned with the authorization code grant response. This refresh token may also be recycled (refreshed) itself as part of this process. As a result the new session cookie will be created and the session will be extended. + +Note, `quarkus.oidc.authentication.session-age-extension` can be important when dealing with expired ID tokens, when the user is not very active. In such cases, if the ID token expires, then the session cookie may not be returned back to the Quarkus endpoint during the next user request and Quarkus will assume it is the first authentication request. Therefore using `quarkus.oidc.authentication.session-age-extension` is important if you need to have even the expired ID tokens refreshed. + +You can also complement refreshing the expired ID tokens by proactively refreshing the valid ID tokens which are about to be expired within the `quarkus.oidc.token.refresh-token-time-skew` value. If, during the current user request, it is calculated that the current ID token will expire within this `quarkus.oidc.token.refresh-token-time-skew` then it will be refreshed and the new session cookie will be created. This property should be set to a value which is less than the ID token lifespan; the closer it is to this lifespan value the more often the ID token will be refreshed. + +You can have this process further optimized by having a simple JavaScript function periodically emulating the user activity by pinging your Quarkus endpoint thus minimizing the window during which the user may have to be re-authenticated. + +Note this user session can not be extended forever - the returning user with the expired ID token will have to re-authenticate at the OIDC provider endpoint once the refresh token has expired. + +=== TokenStateManager + +OIDC `CodeAuthenticationMechanism` is using the default `io.quarkus.oidc.TokenStateManager' interface implementation to keep the ID, access and refresh tokens returned in the authorization code or refresh grant responses in a session cookie. It makes Quarkus OIDC endpoints completely stateless. + +Note that some endpoints do not require the access token. An access token is only required if the endpoint needs to retrieve `UserInfo` or access the downstream service with this access token or use the roles associated with the access token (the roles in the ID token are checked by default). In such cases you can set either `quarkus.oidc.state-session-manager.strategy=id-refresh-token` (keep ID and refresh tokens only) or `quarkus.oidc.state-session-manager.strategy=id-token` (keep ID token only). + +If the ID, access and refresh tokens are JWT tokens then combining all of them (if the strategy is the default `keep-all-tokens`) or only ID and refresh tokens (if the strategy is `id-refresh-token`) may produce a session cookie value larger than 4KB and the browsers may not be able to keep this cookie. +In such cases, you can use `quarkus.oidc.token-state-manager.split-tokens=true` to have a unique session token per each of these tokens. + +Register your own `io.quarkus.oidc.TokenStateManager' implementation as an `@ApplicationScoped` CDI bean if you need to customize the way the tokens are associated with the session cookie. For example, you may want to keep the tokens in a database and have only a database pointer stored in a session cookie. 
Note though that it may present some challenges in making the tokens available across multiple microservices nodes. + +== Listening to important authentication events + +One can register `@ApplicationScoped` bean which will observe important OIDC authentication events. The listener will be updated when a user has logged in for the first time or re-authenticated, as well as when the session has been refreshed. More events may be reported in the future. For example: + +[source, java] +---- +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.event.Observes; + +import io.quarkus.oidc.IdTokenCredential; +import io.quarkus.oidc.SecurityEvent; +import io.quarkus.security.identity.AuthenticationRequestContext; +import io.vertx.ext.web.RoutingContext; + +@ApplicationScoped +public class SecurityEventListener { + + public void event(@Observes SecurityEvent event) { + String tenantId = event.getSecurityIdentity().getAttribute("tenant-id"); + RoutingContext vertxContext = event.getSecurityIdentity().getCredential(IdTokenCredential.class).getRoutingContext(); + vertxContext.put("listener-message", String.format("event:%s,tenantId:%s", event.getEventType().name(), tenantId)); + } +} +---- == Single Page Applications Please check if implementing SPAs the way it is suggested in the link:security-openid-connect#single-page-applications[Single Page Applications for Service Applications] section can meet your requirements. -If you do prefer to use SPA and `XMLHttpRequest`(XHR) with Quarkus `web-app` applications then please be aware that OpenId Connect Providers may not support CORS for Authorization endpoints where the users - are authenticated after a redirect from Quarkus which will lead to the authentication failures if the Quarkus `web-app` application and OpenId Connect Provider are hosted on the different HTTP domains/ports. +If you prefer to use SPA and JavaScript API such as `Fetch` or `XMLHttpRequest`(XHR) with Quarkus web applications, please be aware that OpenID Connect Providers may not support CORS for Authorization endpoints where the users are authenticated after a redirect from Quarkus. This will lead to authentication failures if the Quarkus application and the OpenID Connect Provider are hosted on the different HTTP domains/ports. -In such cases one needs to set the `quarkus.oidc.authentication.xhr-auto-redirect` property to `false` which will instruct Quarkus to return a `499` status code and `WWW-Authenticate` header with the `OIDC` value and the browser script needs to be updated to set "X-Requested-With" header with the `XMLHttpRequest` value and reload the last requested page in case of `499`, for example: +In such cases, set the `quarkus.oidc.authentication.java-script-auto-redirect` property to `false` which will instruct Quarkus to return a `499` status code and `WWW-Authenticate` header with the `OIDC` value. 
The browser script also needs to be updated to set `X-Requested-With` header with the `JavaScript` value and reload the last requested page in case of `499`, for example: [source,javascript] ---- Future callQuarkusService() async { - Map headers = Map.fromEntries([MapEntry("X-Requested-With", "XMLHttpRequest")]); + Map headers = Map.fromEntries([MapEntry("X-Requested-With", "JavaScript")]); await http .get("https://localhost:443/serviceCall") .then((response) { if (response.statusCode == 499) { - window.location.assign(https://localhost.com:443/serviceCall); + window.location.assign("https://localhost.com:443/serviceCall"); } }); } ---- +== Running behind a reverse proxy + +OIDC authentication mechanism can be affected if your Quarkus application is running behind a reverse proxy/gateway/firewall when HTTP `Host` header may be reset to the internal IP address, HTTPS connection may be terminated, etc. For example, an authorization code flow `redirect_uri` parameter may be set to the internal host instead of the expected external one. + +In such cases configuring Quarkus to recognize the original headers forwarded by the proxy will be required, see link:vertx#reverse-proxy[Running behind a reverse proxy] Vert.x documentation section for more information. + +`quarkus.oidc.authentication.force-redirect-https-scheme` property may also be used when the Quarkus application is running behind a SSL terminating reverse proxy. + +== Cloud Services + +=== Google Cloud + +You can have Quarkus OIDC `web-app` applications access **Google Cloud services** such as **BigQuery** on behalf of the currently authenticated users who have enabled OpendId Connect (Authorization Code Flow) permissions to such services in their Google Developer Consoles. + +It is super easy to do with https://github.com/quarkiverse[Quarkiverse] https://github.com/quarkiverse/quarkiverse-google-cloud-services[Google Cloud Services], only add +the https://github.com/quarkiverse/quarkiverse-google-cloud-services/releases/latest[latest tag] service dependency, for example: + +[source,xml] +---- + + io.quarkiverse.googlecloudservices + quarkus-google-cloud-bigquery + ${quarkiverse.googlecloudservices.version} + +---- + +and configure Google OIDC properties: + +[source, properties] +---- +quarkus.oidc.auth-server-url=https://accounts.google.com +quarkus.oidc.application-type=web-app +quarkus.oidc.client-id={GOOGLE_CLIENT_ID} +quarkus.oidc.credentials.secret={GOOGLE_CLIENT_SECRET} +quarkus.oidc.token.issuer=https://accounts.google.com +---- + +== Token Propagation +Please see link:security-openid-connect-client#token-propagation[Token Propagation] section about the Authorization Code Flow access token propagation to the downstream services. 
== Configuration Reference @@ -317,3 +556,5 @@ include::{generated-dir}/config/quarkus-oidc.adoc[opts=optional] * https://www.keycloak.org/documentation.html[Keycloak Documentation] * https://openid.net/connect/[OpenID Connect] * https://tools.ietf.org/html/rfc7519[JSON Web Token] +* link:security-openid-connect-client[Quarkus - Using OpenID Connect and OAuth2 Client and Filters to manage access tokens] +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-openid-connect.adoc b/docs/src/main/asciidoc/security-openid-connect.adoc index 19a1502b9a7f5..15941f07173b4 100644 --- a/docs/src/main/asciidoc/security-openid-connect.adoc +++ b/docs/src/main/asciidoc/security-openid-connect.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using OpenID Connect to Protect Service Applications using Bearer Token Authorization @@ -14,6 +14,8 @@ Bearer Token Authorization is the process of authorizing HTTP requests based on Please read the link:security-openid-connect-web-authentication[Using OpenID Connect to Protect Web Applications] guide if you need to authenticate and authorize the users using OpenId Connect Authorization Code Flow. +If you use Keycloak and Bearer tokens then also see the link:security-keycloak-authorization[Using Keycloak to Centralize Authorization] guide. + Please read the link:security-openid-connect-multitenancy[Using OpenID Connect Multi-Tenancy] guide how to support multiple tenants. == Prerequisites @@ -55,12 +57,13 @@ The solution is located in the `security-openid-connect-quickstart` {quickstarts First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=security-openid-connect-quickstart \ - -Dextensions="oidc, resteasy-jsonb" + -Dextensions="resteasy,oidc,resteasy-jackson" \ + -DnoExamples cd security-openid-connect-quickstart ---- @@ -97,8 +100,6 @@ import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; import org.jboss.resteasy.annotations.cache.NoCache; import io.quarkus.security.identity.SecurityIdentity; @@ -112,7 +113,6 @@ public class UsersResource { @GET @Path("/me") @RolesAllowed("user") - @Produces(MediaType.APPLICATION_JSON) @NoCache public User me() { return new User(securityIdentity); @@ -168,7 +168,7 @@ If you need to access `JsonWebToken` claims, you may simply inject the token its package org.acme.security.openid.connect; import org.eclipse.microprofile.jwt.JsonWebToken; - +import javax.inject.Inject; import javax.annotation.security.RolesAllowed; import javax.ws.rs.GET; import javax.ws.rs.Path; @@ -251,7 +251,7 @@ Then run it: [source,bash] ---- -java -jar ./target/security-openid-connect-quickstart-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- === Running in Native Mode @@ -329,6 +329,18 @@ export access_token=$(\ ) ---- +[[user-info]] +== User Info + +Set `quarkus.oidc.user-info-required=true` if a UserInfo JSON object from the OIDC userinfo endpoint has to be requested. 
+A request will be sent to the OpenId Provider UserInfo endpoint and an `io.quarkus.oidc.UserInfo` (a simple `javax.json.JsonObject` wrapper) object will be created. +`io.quarkus.oidc.UserInfo` can be either injected or accessed as a SecurityIdentity `userinfo` attribute. + +[[config-metadata]] +== Configuration Metadata + +The discovered link:https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata[OpenId Connect Configuration Metadata] is represented by `io.quarkus.oidc.OidcConfigurationMetadata` and can be either injected or accessed as a `SecurityIdentity` `configuration-metadata` attribute. + == Token Claims And SecurityIdentity Roles SecurityIdentity roles can be mapped from the verified JWT access tokens as follows: @@ -341,9 +353,11 @@ SecurityIdentity roles can be mapped from the verified JWT access tokens as foll If the token is opaque (binary) then a `scope` property from the remote token introspection response will be used. +If UserInfo is the source of the roles then set `quarkus.oidc.authentication.user-info-required=true` and `quarkus.oidc.roles.source=userinfo`, and if needed, `quarkus.oidc.roles.role-claim-path`. + Additionally a custom `SecurityIdentityAugmentor` can also be used to add the roles as documented link:security#security-identity-customization[here]. -[[oidc-single-page-applications]] +[[single-page-applications]] == Single Page Applications Single Page Application (SPA) typically uses `XMLHttpRequest`(XHR) and the Java Script utility code provided by the OpenId Connect provider to acquire a bearer token and use it @@ -391,9 +405,14 @@ For example, here is how you can use `keycloak.js` to authenticate the users and ---- +== Token Propagation + +Please see link:security-openid-connect-client#token-propagation[Token Propagation] section about the Bearer access token propagation to the downstream services. 
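+
+As a minimal illustration of the User Info and Configuration Metadata sections above, both objects can simply be injected into an endpoint. The accessors used below (`getString`, `getIssuer`) and the `email` property are assumptions made for the sake of the sketch:
+
+[source, java]
+----
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+import io.quarkus.oidc.OidcConfigurationMetadata;
+import io.quarkus.oidc.UserInfo;
+
+@Path("/oidc-info")
+public class OidcInfoResource {
+
+    @Inject
+    UserInfo userInfo; // requires quarkus.oidc.user-info-required=true
+
+    @Inject
+    OidcConfigurationMetadata configMetadata;
+
+    @GET
+    public String info() {
+        // UserInfo wraps the UserInfo endpoint JSON object, so individual properties can be read by name
+        return userInfo.getString("email") + " authenticated against " + configMetadata.getIssuer();
+    }
+}
+----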
== References * https://www.keycloak.org/documentation.html[Keycloak Documentation] * https://openid.net/connect/[OpenID Connect] * https://tools.ietf.org/html/rfc7519[JSON Web Token] +* link:security-openid-connect-client[Quarkus - Using OpenID Connect and OAuth2 Client and Filters to manage access tokens] +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-properties.adoc b/docs/src/main/asciidoc/security-properties.adoc index dba58fb1a9b9f..8312095ce1aec 100644 --- a/docs/src/main/asciidoc/security-properties.adoc +++ b/docs/src/main/asciidoc/security-properties.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Security with .properties File @@ -134,3 +134,7 @@ quarkus.security.users.embedded.roles.noadmin=user ---- <1> User `scott` has roles `Admin`, `admin`, `Tester`, and `user` <2> User `stuart` has roles `admin` and `user` + +== References + +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security-testing.adoc b/docs/src/main/asciidoc/security-testing.adoc index eca37da1d0478..ff5e16408d395 100644 --- a/docs/src/main/asciidoc/security-testing.adoc +++ b/docs/src/main/asciidoc/security-testing.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Security Testing @@ -9,11 +9,12 @@ include::./attributes.adoc[] This document describes how to test Quarkus Security. +[[configuring-user-information]] == Configuring User Information You can use link:security-properties[quarkus-elytron-security-properties-file] for testing security. This supports both embedding user info in `application.properties` and standalone properties files. -For example, the following configuration will allow for configuring the users in both the production where OAuth2 is required and development modes using link:https://quarkus.io/guides/config#configuration-profiles[Configuration Profiles]. +For example, the following configuration will allow for configuring the users in both the production where OAuth2 is required and development modes using link:config#configuration-profiles[Configuration Profiles]. [source,properties] ---- @@ -95,5 +96,8 @@ for example by setting `quarkus.http.auth.basic=true` or `%test.quarkus.http.aut == Use Wiremock for Integration Testing You can also use Wiremock to mock the authorization OAuth2 and OIDC services: -See link:security-oauth#integration-testing[OAuth2 Integration testing] for more details. +See link:security-oauth2#integration-testing[OAuth2 Integration testing] for more details. 
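+
+As a small illustration of the embedded test users described above, a `@QuarkusTest` can call a protected endpoint with HTTP Basic credentials via REST Assured. The `/secured` path and the `jb0ss` password below are placeholders for whatever is configured in the test profile:
+
+[source, java]
+----
+import static io.restassured.RestAssured.given;
+
+import org.junit.jupiter.api.Test;
+
+import io.quarkus.test.junit.QuarkusTest;
+
+@QuarkusTest
+public class ProtectedResourceTest {
+
+    @Test
+    public void testSecuredEndpointWithTestUser() {
+        // 'scott' must match a user declared via the %test.quarkus.security.users.embedded.* properties
+        given()
+            .auth().preemptive().basic("scott", "jb0ss")
+            .when().get("/secured")
+            .then()
+            .statusCode(200);
+    }
+}
+----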
+== References + +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/security.adoc b/docs/src/main/asciidoc/security.adoc index e1f96671cdcd1..e0266ec6ff7d4 100644 --- a/docs/src/main/asciidoc/security.adoc +++ b/docs/src/main/asciidoc/security.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Security Architecture and Guides @@ -20,7 +20,7 @@ complete the conversion of these credentials to `SecurityIdentity`. For example, the credentials may be coming with the HTTP `Authorization` header, client HTTPS certificates or cookies. -`IdentityProvider` verifies the authentication credentials and maps them to `SecurityIdentity` which contains the user name, roles, the original authentication credentials, and other attributes. +`IdentityProvider` verifies the authentication credentials and maps them to `SecurityIdentity` which contains the username, roles, the original authentication credentials, and other attributes. For every authenticated resource, you can inject a `SecurityIdentity` instance to get the authenticated identity information. @@ -42,7 +42,9 @@ Quarkus provides Mutual TLS authentication so that you can authenticate users ba Please see link:security-built-in-authentication#mutual-tls[Mutual TLS Authentication] for more information. -== OpenId Connect +=== OpenId Connect, JWT, OAuth2 + +==== OpenId Connect `quarkus-oidc` extension provides a reactive, interoperable, multi-tenant enabled OpenId Connect adapter which supports `Bearer Token` and `Authorization Code Flow` authentication mechanisms. @@ -62,10 +64,31 @@ Both `quarkus-oidc` `Bearer` and `Authorization Code Flow` Authentication mechan See link:security-openid-connect-multitenancy[Using OpenID Connect Multi-Tenancy] for more information about multiple tenants which can support `Bearer` or `Authorization Code Flow` authentication mechanism and configured statically or dynamically. +[NOTE] +==== +If you would like to have Quarkus OIDC extension enabled at runtime then set `quarkus.oidc.tenant-enabled=false` at build time and re-enable it at runtime using a system property. +See also link:security-openid-connect-multitenancy#disable-tenant[Disabling Tenant Configurations] for more information about managing the individual tenant configurations in the multi-tenant OIDC deployments. +==== + If you use Keycloak and Bearer tokens then also see the link:security-keycloak-authorization[Using Keycloak to Centralize Authorization] guide. +[NOTE] +==== +If you need to configure Keycloak programmatically then consider using https://www.keycloak.org/docs/latest/server_development/#admin-rest-api[Keycloak Admin REST API] with the help of the `quarkus-keycloak-admin-client` extension. +==== + +==== OpenId Connect Client and Filters + +`quarkus-oidc-client` extension provides `OidcClient` for acquiring and refreshing access tokens from OpenId Connect and OAuth2 providers which support `client-credentials`, `password` and `refresh_token` token grants. + +`quarkus-oidc-client-filter` extension depends on the `quarkus-oidc-client` extension and provides JAX-RS `OidcClientRequestFilter` which sets the access token acquired by `OidcClient` as an HTTP `Authorization` header's `Bearer` scheme value. 
This filter can be registered with MP RestClient implementations injected into the current Quarkus endpoint but it is not related to the authentication requirements of this service endpoint. For example, it can be a public endpoint or it can be protected with MTLS - the important point is that this Quarkus endpoint does not have to be protected itself with the Quarkus OpenId Connect adapter. + +`quarkus-oidc-token-propagation` extension depends on the `quarkus-oidc` extension and provides JAX-RS `TokenCredentialRequestFilter` which sets the OpenId Connect Bearer or Authorization Code Flow access token as an HTTP `Authorization` header's `Bearer` scheme value. This filter can be registered with MP RestClient implementations injected into the current Quarkus endpoint and the Quarkus endpoint must be protected itself with the Quarkus OpenId Connect adapter. This filter can be used to propagate the access token to the downstream services. + +See the link:security-openid-connect-client[Using OpenID Connect and OAuth2 Client] guide for more information. + [[smallrye-jwt]] -== SmallRye JWT +==== SmallRye JWT `quarkus-smallrye-jwt` provides Microprofile JWT 1.1.1 implementation and many more options to verify signed and encrypted `JWT` tokens and represent them as `org.eclipse.microprofile.jwt.JsonWebToken`. @@ -75,13 +98,82 @@ Additionally it provides `JWT Generation API` for creating `signed`, `inner-sign See the link:security-jwt[Using SmallRye JWT] guide for more information. -== OAuth2 +==== OAuth2 `quarkus-elytron-security-oauth2` provides an alternative to `quarkus-oidc` Bearer Token Authentication Mechanism. It is based on `Elytron` and is primarily meant for introspecting the opaque tokens remotely. See the link:security-oauth2[Using OAuth2] guide for more information. -== LDAP +==== Choosing between OpenId Connect, SmallRye JWT and OAuth2 extensions + +`quarkus-oidc` extension requires an OpenId Connect provider such as Keycloak which can be used to verify the Bearer tokens or authenticate the end users with the Authorization Code flow. In both cases `quarkus-oidc` requires a connection to this OpenId Connect provider. + +`quarkus-oidc` is the only option when the user authentication via Authorization Code flow or supporting multiple tenants is required. It can also request a UserInfo using both Authorization Code Flow and Bearer access tokens. + +When the Bearer tokens have to be verified then `quarkus-oidc`, `quarkus-smallrye-jwt` and `quarkus-elytron-security-oauth2` can be used. + +If you have Bearer tokens in a JWT format then all these 3 extensions can be used. Both `quarkus-oidc` and `quarkus-smallrye-jwt` support refreshing the JsonWebKey (JWK) set when the OpenId Connect provider rotates the keys, therefore `quarkus-oidc` or `quarkus-smallrye-jwt` should be used for verifying JWT tokens if the remote token introspection has to be avoided or not supported by the providers. + +`quarkus-smallrye-jwt` does not support the remote introspection of the opaque tokens or even JWT tokens - it always relies on the locally available keys - possibly fetched from the OpenId Connect provider. So if you need to introspect the JWT tokens remotely then both `quarkus-oidc` and `quarkus-elytron-security-oauth2` will work. Both extensions also support the verification of the opaque/binary tokens via the remote introspection. 
+ +`quarkus-oidc` and `quarkus-smallrye-jwt` can have both JWT and opaque tokens injected into the endpoint code - the injected JWT tokens may offer a richer information about the user. All extensions can have the tokens injected as `Principal`. + +`quarkus-smallrye-jwt` supports more key formats than `quarkus-oidc`. The latter will only use the JWK-formatted keys which are part of a JWK set. The former - can also work with PEM keys. + +`quarkus-smallrye-jwt` can handle locally not only signed but also inner-signed-and-encrypted or only encrypted tokens. In fact `quarkus-oidc` and `quarkus-elytron-security-oauth2` can verify such tokens too but only by treating them as opaque tokens and verifying them via the remote introspection. + +`quarkus-elytron-security-oauth2` is the best choice if you need a light weight library for the remote introspection of either opaque or JWT tokens. + +Note that a choice of using the opaque versus JWT token format is often driven by the architectural considerations. Opaque tokens are usually much shorter than JWT tokens but they require maintaining most of the token associated state in the provider database - the opaque tokens are effectively the database pointers. JWT tokens are significantly longer than the opaque tokens - but the providers are effectively delegating storing most of the token associated state to the client by storing it as the token claims and either signing and/or encrypting them. + +Below is a summary of the options. + +|=== +| | quarkus-oidc| quarkus-smallrye-jwt | quarkus-elytron-security-oauth2 + +|Bearer JWT verification is required +|Local Verification or Introspection +|Local Verification +|Introspection +|Bearer Opaque Token verification is required +|Introspection +|No +|Introspection +|Refreshing JsonWebKey set for verifying JWT tokens +|Yes +|Yes +|No +|Represent token as Principal +|Yes +|Yes +|Yes +|Inject JWT as MP JWT JsonWebToken +|Yes +|Yes +|No +|Authorization Code Flow +|Yes +|No +|No +|Multi-tenancy +|Yes +|No +|No +|UserInfo support +|Yes +|No +|No +|Pem Key format support +|No +|Yes +|No +|InnerSigned/Encrypted or Encrypted tokens +|Introspection +|Local Verification +|Introspection +|=== + +=== LDAP Please see the link:security-ldap[Authenticate with LDAP] guide for more information about LDAP authentication mechanism. @@ -96,6 +188,7 @@ For example, `quarkus-oidc` uses its own `IdentityProvider` to convert a token t If you use `Basic` or `Form` HTTP-based authentication then you have to add an `IdentityProvider` which can convert a user name and password to `SecurityIdentity`. See link:security-jpa[JPA IdentityProvider] and link:security-jdbc[JDBC IdentityProvider] for more information. +You can also use link:security-testing#configuring-user-information[User Properties IdentityProvider] for testing. == Combining Authentication Mechanisms @@ -119,11 +212,31 @@ Quarkus Security is highly customizable. One can register custom ``HttpAuthentic See link:security-customization[Security Customization] for more information about customizing Quarkus Security and other useful tips about the reactive security, registering the security providers, etc. +== Secure connections with SSL + +See the link:http-reference#ssl[Supporting secure connections with SSL] guide for more information. + +== Cross-Origin Resource Sharing + +If you plan to make your Quarkus application accessible to another application running on a different domain, you will need to configure CORS (Cross-Origin Resource Sharing). 
Please read the link:http-reference#cors-filter[HTTP CORS documentation] for more information. + +== SameSite cookies + +Please see link:vertx#same-site-cookie[SameSite cookies] for information about adding a https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie/SameSite[SameSite] cookie property to any of the cookies set by a Quarkus endpoint. + == Testing See link:security-testing[Security Testing] for more information about testing Quarkus Security. == Secret Engines - +=== Vault Quarkus provides a very comprehensive HashiCorp Vault support, please see the link:vault[Quarkus and HashiCorp Vault] documentation for more information. +== National Vulnerability Database + +Most of Quarkus tags have been registered in link:https://nvd.nist.gov[National Vulnerability Database] (NVD) using a Common Platform Enumeration (CPE) name format. +All registered Quarkus CPE names can be found using link:https://nvd.nist.gov/products/cpe/search/results?namingFormat=2.3&keyword=quarkus[this search query]. +If a Quarkus tag represented by the given CPE name entry is affected by some CVE then you'll be able to follow a provided link to that CVE. + +We will be asking the NVD CPE team to update the list as well as link Quarkus CPE name entries with the related CVEs on a regular basis. +If you work with a plugin like OWASP plugin which is using NVD feeds to detect the vulnerabilities at the application build time and you see a false positive reported then please re-open link:https://github.com/quarkusio/quarkus/issues/2611[this issue] and provide the details. diff --git a/docs/src/main/asciidoc/software-transactional-memory.adoc b/docs/src/main/asciidoc/software-transactional-memory.adoc index a36a4d94aa10b..5295c99cb8037 100644 --- a/docs/src/main/asciidoc/software-transactional-memory.adoc +++ b/docs/src/main/asciidoc/software-transactional-memory.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Using Software Transactional Memory in Quarkus @@ -102,7 +102,7 @@ To use the extension include it as a dependency in your application pom: == Defining STM-aware classes -In order for the STM subsytem to have knowledge about which classes are to be managed within the context +In order for the STM subsystem to have knowledge about which classes are to be managed within the context of transactional memory it is necessary to provide a minimal level of instrumentation. This occurs by categorising STM-aware and STM-unaware classes through an interface boundary; specifically all STM-aware objects must be instances of classes which inherit from interfaces that themselves have been annotated to identify them @@ -180,7 +180,7 @@ concurrency control. <2> Then you create an instance that implements `FlightService`. You should not use it directly at this stage because access to it is not being managed by the STM subsystem. <3> To obtain a managed instance, pass the original object to the STM `container` which then returns a reference - through which you will be able perform transactional operations. This reference can be used safely from multiple threads. + through which you will be able to perform transactional operations. This reference can be used safely from multiple threads. 
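+
+Putting the pieces above together, the following condensed sketch shows an STM-aware interface, an implementation and a container-managed instance; the `FlightService` methods shown are simplified assumptions rather than the full example used in this guide:
+
+[source, java]
+----
+import org.jboss.stm.Container;
+import org.jboss.stm.annotations.ReadLock;
+import org.jboss.stm.annotations.Transactional;
+import org.jboss.stm.annotations.WriteLock;
+
+public class StmSketch {
+
+    @Transactional // marks the interface so that its implementations are managed by the STM subsystem
+    public interface FlightService {
+        int getNumberOfBookings();
+        void makeBooking(String details);
+    }
+
+    public static class FlightServiceImpl implements FlightService {
+
+        private int numberOfBookings; // instance state is managed transactionally
+
+        @ReadLock
+        public int getNumberOfBookings() {
+            return numberOfBookings;
+        }
+
+        @WriteLock
+        public void makeBooking(String details) {
+            numberOfBookings++;
+        }
+    }
+
+    public static void main(String[] args) {
+        FlightServiceImpl original = new FlightServiceImpl();    // plain instance, not yet managed
+        Container<FlightService> container = new Container<>(); // the STM container
+        FlightService flightService = container.create(original); // managed reference, safe to share across threads
+        flightService.makeBooking("BA123");
+    }
+}
+----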
== Defining transaction boundaries diff --git a/docs/src/main/asciidoc/spring-boot-properties.adoc b/docs/src/main/asciidoc/spring-boot-properties.adoc index 2e224ab68450e..23f4605cc748d 100644 --- a/docs/src/main/asciidoc/spring-boot-properties.adoc +++ b/docs/src/main/asciidoc/spring-boot-properties.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Accessing application properties with Spring Boot properties API @@ -35,14 +35,14 @@ The solution is located in the `spring-boot-properties-quickstart` {quickstarts- First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=spring-boot-properties-quickstart \ -DclassName="org.acme.spring.boot.properties.GreetingResource" \ -Dpath="/greeting" \ - -Dextensions="spring-boot-properties" + -Dextensions="resteasy,spring-boot-properties" cd spring-boot-properties-quickstart ---- @@ -112,7 +112,7 @@ Here `text` field is public, but it could also be a private field with getter an Because `text` does not have a default value it is considered required and unless it is defined in a configuration file (`application.properties` by default) your application will fail to start. Define this property in your `src/main/resources/application.properties` file: -[source,shell] +[source,properties] ---- # Your configuration properties greeting.text = hello @@ -153,7 +153,7 @@ Open your browser to http://localhost:8080/greeting. Changing the configuration file is immediately reflected. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. == Default values @@ -350,7 +350,7 @@ This is especially useful when then number of properties grows. Because of the additional class, our property names have changed. Let's update the properties file and the `GreetingResource` class. -[source,shell] +[source,properties] ---- # Your configuration properties greeting.message.text = hello @@ -387,6 +387,7 @@ Quarkus has more Spring compatibility features. 
See the following guides for mor * link:spring-di[Quarkus - Extension for Spring DI] * link:spring-web[Quarkus - Extension for Spring Web] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-security[Quarkus - Extension for Spring Security] * link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] * link:spring-cache[Quarkus - Extension for Spring Cache] diff --git a/docs/src/main/asciidoc/spring-cache.adoc b/docs/src/main/asciidoc/spring-cache.adoc index bbe0ea000e8bc..6637d38c6c646 100644 --- a/docs/src/main/asciidoc/spring-cache.adoc +++ b/docs/src/main/asciidoc/spring-cache.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Quarkus Extension for Spring Cache API @@ -29,14 +29,14 @@ To complete this guide, you need: First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=spring-cache-quickstart \ -DclassName="org.acme.spring.cache.GreeterResource" \ -Dpath="/greeting" \ - -Dextensions="spring-di,spring-cache" + -Dextensions="resteasy,spring-di,spring-cache" cd spring-cache-quickstart ---- @@ -146,8 +146,6 @@ import java.util.List; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; import org.jboss.resteasy.annotations.jaxrs.QueryParam; @@ -158,7 +156,6 @@ public class WeatherForecastResource { WeatherForecastService service; @GET - @Produces(MediaType.APPLICATION_JSON) public WeatherForecast getForecast(@QueryParam String city, @QueryParam long daysInFuture) { // <1> long executionStart = System.currentTimeMillis(); List dailyForecasts = Arrays.asList( @@ -276,6 +273,7 @@ Quarkus has more Spring compatibility features. See the following guides for mor * link:spring-web[Quarkus - Extension for Spring Web] * link:spring-security[Quarkus - Extension for Spring Security] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] * link:spring-scheduled[Quarkus - Extension for Spring Scheduled] diff --git a/docs/src/main/asciidoc/spring-cloud-config-client.adoc b/docs/src/main/asciidoc/spring-cloud-config-client.adoc index 1a27ea32046b8..ad49ab9c26f2f 100644 --- a/docs/src/main/asciidoc/spring-cloud-config-client.adoc +++ b/docs/src/main/asciidoc/spring-cloud-config-client.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Reading properties from Spring Cloud Config Server @@ -35,7 +35,7 @@ The end result of that process is a running Config Server that will provide the First, we need a new project. 
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ @@ -150,6 +150,7 @@ Quarkus has more Spring compatibility features. See the following guides for mor * link:spring-di[Quarkus - Extension for Spring DI] * link:spring-web[Quarkus - Extension for Spring Web] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-security[Quarkus - Extension for Spring Security] * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] * link:spring-cache[Quarkus - Extension for Spring Cache] @@ -158,5 +159,5 @@ Quarkus has more Spring compatibility features. See the following guides for mor [[spring-cloud-config-client-configuration-reference]] == Spring Cloud Config Client Reference -include::{generated-dir}/config/quarkus-spring-cloud-config.adoc[leveloffset=+1, opts=optional] +include::{generated-dir}/config/quarkus-spring-cloud-config-client.adoc[leveloffset=+1, opts=optional] diff --git a/docs/src/main/asciidoc/spring-data-jpa.adoc b/docs/src/main/asciidoc/spring-data-jpa.adoc index 54d4aadfda32e..e8de3fb6d4032 100644 --- a/docs/src/main/asciidoc/spring-data-jpa.adoc +++ b/docs/src/main/asciidoc/spring-data-jpa.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Extension for Spring Data API @@ -35,14 +35,14 @@ The solution is located in the `spring-data-jpa-quickstart` {quickstarts-tree-ur First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=spring-data-jpa-quickstart \ -DclassName="org.acme.spring.data.jpa.FruitResource" \ -Dpath="/greeting" \ - -Dextensions="spring-data-jpa,resteasy-jsonb,quarkus-jdbc-postgresql" + -Dextensions="resteasy,spring-data-jpa,resteasy-jackson,quarkus-jdbc-postgresql" cd spring-data-jpa-quickstart ---- @@ -144,9 +144,10 @@ This configuration assumes that PostgreSQL will be running locally. A very easy way to accomplish that is by using the following Docker command: -``` +[source,bash] +---- docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name quarkus_test -e POSTGRES_USER=quarkus_test -e POSTGRES_PASSWORD=quarkus_test -e POSTGRES_DB=quarkus_test -p 5432:5432 postgres:11.5 -``` +---- If you plan on using a different setup, please change your `application.properties` accordingly. @@ -161,6 +162,7 @@ INSERT INTO fruit(id, name, color) VALUES (1, 'Cherry', 'Red'); INSERT INTO fruit(id, name, color) VALUES (2, 'Apple', 'Red'); INSERT INTO fruit(id, name, color) VALUES (3, 'Banana', 'Yellow'); INSERT INTO fruit(id, name, color) VALUES (4, 'Avocado', 'Green'); +INSERT INTO fruit(id, name, color) VALUES (5, 'Strawberry', 'Red'); ---- Hibernate ORM will execute these queries on application startup. 
@@ -201,7 +203,6 @@ import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; -import javax.ws.rs.Produces; import org.jboss.resteasy.annotations.jaxrs.PathParam; @@ -218,7 +219,6 @@ public class FruitResource { } @GET - @Produces("application/json") public Iterable findAll() { return fruitRepository.findAll(); } @@ -232,14 +232,12 @@ public class FruitResource { @POST @Path("/name/{name}/color/{color}") - @Produces("application/json") public Fruit create(@PathParam String name, @PathParam String color) { return fruitRepository.save(new Fruit(name, color)); } @PUT @Path("/id/{id}/color/{color}") - @Produces("application/json") public Fruit changeColor(@PathParam Long id, @PathParam String color) { Optional optional = fruitRepository.findById(id); if (optional.isPresent()) { @@ -253,7 +251,6 @@ public class FruitResource { @GET @Path("/color/{color}") - @Produces("application/json") public List findByColor(@PathParam String color) { return fruitRepository.findByColor(color); } @@ -599,7 +596,7 @@ quarkus.hibernate-orm.implicit-naming-strategy=org.springframework.boot.orm.jpa. ==== More examples -An extensive list of examples can be seen in the https://github.com/quarkusio/quarkus/tree/master/integration-tests/spring-data-jpa[integration tests] directory which is located inside the Quarkus source code. +An extensive list of examples can be seen in the https://github.com/quarkusio/quarkus/tree/main/integration-tests/spring-data-jpa[integration tests] directory which is located inside the Quarkus source code. === What is currently unsupported @@ -633,3 +630,4 @@ Quarkus has more Spring compatibility features. See the following guides for mor * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] * link:spring-cache[Quarkus - Extension for Spring Cache] * link:spring-scheduled[Quarkus - Extension for Spring Scheduled] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] diff --git a/docs/src/main/asciidoc/spring-data-rest.adoc b/docs/src/main/asciidoc/spring-data-rest.adoc new file mode 100644 index 0000000000000..0d1e4865e398c --- /dev/null +++ b/docs/src/main/asciidoc/spring-data-rest.adoc @@ -0,0 +1,441 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus - Extension for Spring Data REST + +include::./attributes.adoc[] +:extension-status: experimental + +While users are encouraged to use REST Data with Panache for the REST data access endpoints generation, +Quarkus provides a compatibility layer for Spring Data REST in the form of the `spring-data-rest` extension. + + +include::./status-include.adoc[] + +== Prerequisites + +To complete this guide, you need: + +* less than 15 minutes +* an IDE +* JDK 1.8+ installed with `JAVA_HOME` configured appropriately +* Apache Maven {maven-version} + +== Solution + +We recommend that you follow the instructions in the next sections and create the application step by step. +However, you can go right to the completed example. + +Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {quickstarts-archive-url}[archive]. + +The solution is located in the `spring-data-rest-quickstart` {quickstarts-tree-url}/spring-data-rest-quickstart[directory]. + +== Creating the Maven project + +First, we need a new project. 
Create a new project with the following command: + +[source,bash,subs=attributes+] +---- +mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ + -DprojectGroupId=org.acme \ + -DprojectArtifactId=spring-data-rest-quickstart \ + -Dextensions="spring-data-rest,resteasy-jackson,quarkus-jdbc-postgresql" \ + -DnoExamples +cd spring-data-rest-quickstart +---- + +This command generates a Maven project with the `spring-data-rest` extension. + +If you already have your Quarkus project configured, you can add the `spring-data-rest` extension +to your project by running the following command in your project base directory: + +[source,bash] +---- +./mvnw quarkus:add-extension -Dextensions="spring-data-rest" +---- + +This will add the following to your `pom.xml`: + +[source,xml] +---- +<dependency> +    <groupId>io.quarkus</groupId> +    <artifactId>quarkus-spring-data-rest</artifactId> +</dependency> +---- + +For the tests you will also need REST Assured. Add it to the `pom.xml`: +[source,xml] +---- +<dependency> +    <groupId>io.rest-assured</groupId> +    <artifactId>rest-assured</artifactId> +    <scope>test</scope> +</dependency> +---- + +Note: both `resteasy-jackson` and `resteasy-jsonb` are supported and can be interchanged. + +== Define the Entity + +Throughout the course of this guide, the following JPA Entity will be used: + +[source,java] +---- +package org.acme.spring.data.rest; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; + +@Entity +public class Fruit { + + @Id + @GeneratedValue + private Long id; + + private String name; + + private String color; + + + public Fruit() { + } + + public Fruit(String name, String color) { + this.name = name; + this.color = color; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getColor() { + return color; + } + + public void setColor(String color) { + this.color = color; + } +} +---- + + +== Configure database access properties + +Add the following properties to `application.properties` to configure access to a local PostgreSQL instance. + +[source,properties] +---- +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=quarkus_test +quarkus.datasource.password=quarkus_test +quarkus.datasource.jdbc.url=jdbc:postgresql:quarkus_test +quarkus.datasource.jdbc.max-size=8 +quarkus.hibernate-orm.database.generation=drop-and-create +---- + +This configuration assumes that PostgreSQL will be running locally. + +A very easy way to accomplish that is by using the following Docker command: + +[source,bash] +---- +docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name quarkus_test -e POSTGRES_USER=quarkus_test -e POSTGRES_PASSWORD=quarkus_test -e POSTGRES_DB=quarkus_test -p 5432:5432 postgres:11.5 +---- + +If you plan on using a different setup, please change your `application.properties` accordingly.
+ +== Prepare the data + +To make it easier to showcase some capabilities of Spring Data REST on Quarkus, some test data should be inserted into the database +by adding the following content to a new file named `src/main/resources/import.sql`: + +[source,sql] +---- +INSERT INTO fruit(id, name, color) VALUES (1, 'Cherry', 'Red'); +INSERT INTO fruit(id, name, color) VALUES (2, 'Apple', 'Red'); +INSERT INTO fruit(id, name, color) VALUES (3, 'Banana', 'Yellow'); +INSERT INTO fruit(id, name, color) VALUES (4, 'Avocado', 'Green'); +INSERT INTO fruit(id, name, color) VALUES (5, 'Strawberry', 'Red'); +---- + +Hibernate ORM will execute these queries on application startup. + +== Define the repository + +It is now time to define the repository that will be used to access `Fruit`. +In a typical Spring Data fashion, create a repository like so: + +[source,java] +---- +package org.acme.spring.data.rest; + +import org.springframework.data.repository.CrudRepository; + +public interface FruitsRepository extends CrudRepository<Fruit, Long> { +} +---- + +The `FruitsRepository` above extends Spring Data's `org.springframework.data.repository.CrudRepository` which means that all of the latter's methods are +available to `FruitsRepository`. + +The `spring-data-jpa` extension will generate an implementation for this repository. Then the `spring-data-rest` extension will generate a REST CRUD resource for it. + +== Update the test + +To test the capabilities of `FruitsRepository`, proceed to update the content of `FruitsRepositoryTest` to: + +[source,java] +---- +package org.acme.spring.data.rest; + +import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.core.IsNot.not; + +@QuarkusTest +class FruitsRepositoryTest { + + @Test + void testListAllFruits() { + //List all, should contain the fruits the database has initially: + given() + .accept("application/json") + .when().get("/fruits") + .then() + .statusCode(200) + .body( + containsString("Cherry"), + containsString("Apple"), + containsString("Banana") + ); + + //Delete the Cherry: + given() + .when().delete("/fruits/1") + .then() + .statusCode(204); + + //List all, cherry should be missing now: + given() + .accept("application/json") + .when().get("/fruits") + .then() + .statusCode(200) + .body( + not(containsString("Cherry")), + containsString("Apple"), + containsString("Banana") + ); + + //Create a new Fruit + given() + .contentType("application/json") + .accept("application/json") + .body("{\"name\": \"Orange\", \"color\": \"Orange\"}") + .when().post("/fruits") + .then() + .statusCode(201) + .body(containsString("Orange")) + .body("id", notNullValue()) + .extract().body().jsonPath().getString("id"); + + //List all, Orange should be present now: + given() + .accept("application/json") + .when().get("/fruits") + .then() + .statusCode(200) + .body( + not(containsString("Cherry")), + containsString("Apple"), + containsString("Orange") + ); + } +} + +---- + +The test can be easily run by issuing: `./mvnw test`. + +== Package and run the application + +Quarkus dev mode works with the defined repositories just like with any other Quarkus extension, greatly enhancing your productivity during the dev cycle.
+ +The application can be started in dev mode as usual using: + +[source, bash] +---- +./mvnw quarkus:dev +---- + +== Run the application as a native binary + +You can of course create a native executable following the instructions of the link:building-native-image[Building native executables] guide. + +== Supported Spring Data REST functionalities + +Quarkus currently supports a subset of Spring Data REST features, namely the most useful and most commonly used features. + +=== What is supported + +The following sections describe the most important supported features of Spring Data REST. + +==== Automatic REST endpoint generation + +Interfaces that extend any of the following Spring Data repositories get automatically generated REST endpoints: + +* `org.springframework.data.repository.CrudRepository` +* `org.springframework.data.repository.PagingAndSortingRepository` +* `org.springframework.data.jpa.repository.JpaRepository` + +Endpoints generated from the above repositories expose five common REST operations: + +* `GET /fruits` - lists all entities or returns a page if `PagingAndSortingRepository` or `JpaRepository` is used. +* `GET /fruits/:id` - returns an entity by ID. +* `POST /fruits` - creates a new entity. +* `PUT /fruits/:id` - updates an existing entity or creates a new one with a specified ID (if allowed by the entity definition). +* `DELETE /fruits/:id` - deletes an entity by ID. + +There are two supported content types: `application/json` and `application/hal+json`. +The former is used by default, but it is highly recommended to specify which one you prefer with an `Accept` header. + +==== Exposing many entities + +If a database contains many entities, it might not be a great idea to return them all at once. +`PagingAndSortingRepository` allows the `spring-data-rest` extension to access data in chunks. + +Replace the `CrudRepository` with `PagingAndSortingRepository` in the `FruitsRepository`: + +[source,java] +---- +package org.acme.spring.data.rest; + +import org.springframework.data.repository.PagingAndSortingRepository; + +public interface FruitsRepository extends PagingAndSortingRepository<Fruit, Long> { +} +---- + +Now the `GET /fruits` will accept three new query parameters: `sort`, `page` and `size`. + +|=== +| Query parameter | Description | Default value | Example values + +| `sort` +| Sorts the entities that are returned by the list operation +| "" +| `?sort=name` (ascending name), `?sort=name,-color` (ascending name and descending color) + +| `page` +| Zero indexed page number. Invalid value is interpreted as 0. +| 0 +| 0, 11, 100 + +| `size` +| Page size. Minimal accepted value is 1. Any lower value is interpreted as 1. +| 20 +| 1, 11, 100 +|=== + +For paged responses, `spring-data-rest` also returns a set of link headers that can be used to access other pages: first, previous, next and last. + +==== Fine tuning endpoints generation + +Fine tuning allows the user to specify which methods should be exposed and what path should be used to access them. +Spring Data REST provides two annotations that can be used: `@RepositoryRestResource` and `@RestResource`. +The `spring-data-rest` extension supports the `exported`, `path` and `collectionResourceRel` attributes of these annotations. + +Assume for example that the fruits repository should be accessible by a `/my-fruits` path and only allow the `GET` operation.
+ +In such a case, `FruitsRepository` would look like so: + +[source,java] +---- +package org.acme.spring.data.rest; + +import java.util.Optional; + +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.rest.core.annotation.RepositoryRestResource; +import org.springframework.data.rest.core.annotation.RestResource; + +@RepositoryRestResource(exported = false, path = "/my-fruits") +public interface FruitsRepository extends CrudRepository<Fruit, Long> { + + @RestResource(exported = true) + Optional<Fruit> findById(Long id); + + @RestResource(exported = true) + Iterable<Fruit> findAll(); +} +---- + +`spring-data-rest` uses only a subset of the repository methods for data access. +It is important to annotate the correct method in order to customize its REST endpoint: + +|=== +|REST operation |CrudRepository |PagingAndSortingRepository and JpaRepository + +|Get by ID +|`Optional<T> findById(ID id)` +|`Optional<T> findById(ID id)` + +|List +|`Iterable<T> findAll()` +|`Page<T> findAll(Pageable pageable)` + +|Create +|`<S extends T> S save(S entity)` +|`<S extends T> S save(S entity)` + +|Update +|`<S extends T> S save(S entity)` +|`<S extends T> S save(S entity)` + +|Delete +|`void deleteById(ID id)` +|`void deleteById(ID id)` +|=== + +=== What is currently unsupported + +* Only the repository methods listed above are supported. No other standard or custom methods are supported. +* Only the `exported`, `path` and `collectionResourceRel` annotation properties are supported. + +== Important Technical Note + +Please note that the Spring support in Quarkus does not start a Spring Application Context nor are any Spring infrastructure classes run. +Spring classes and annotations are only used for reading metadata and / or are used as user code method return types or parameter types. + +== More Spring guides + +Quarkus has more Spring compatibility features. See the following guides for more details: + +* link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-di[Quarkus - Extension for Spring DI] +* link:spring-web[Quarkus - Extension for Spring Web] +* link:spring-security[Quarkus - Extension for Spring Security] +* link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] +* link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] +* link:spring-cache[Quarkus - Extension for Spring Cache] +* link:spring-scheduled[Quarkus - Extension for Spring Scheduled] diff --git a/docs/src/main/asciidoc/spring-di.adoc b/docs/src/main/asciidoc/spring-di.adoc index 03335f70a9e09..10c9a9b0e28ae 100644 --- a/docs/src/main/asciidoc/spring-di.adoc +++ b/docs/src/main/asciidoc/spring-di.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Quarkus Extension for Spring DI API @@ -37,14 +37,14 @@ The solution is located in the `spring-di-quickstart` {quickstarts-tree-url}/spr First, we need a new project.
Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=spring-di-quickstart \ -DclassName="org.acme.spring.di.GreeterResource" \ -Dpath="/greeting" \ - -Dextensions="spring-di" + -Dextensions="resteasy,spring-di" cd spring-di-quickstart ---- @@ -106,6 +106,8 @@ public class AppConfiguration { } } ---- +As a Spring developer, you might be tempted to add the `@ComponentScan` annotation in order to define specific packages to scan for additional beans. Do note that `@ComponentScan` is entirely unnecessary since Quarkus performs link:cdi-reference#bean_discovery[bean discovery] only in `annotated` mode with no visibility boundaries. Moreover, note that the bean discovery in Quarkus happens at build time. +In the same vein, Quarkus does not support the Spring `@Import` annotation. Now we define another bean that will implement `StringFunction` using Spring's stereotype annotation style. This bean will effectively be a no-op bean that simply returns the input as is. @@ -130,7 +132,7 @@ public class NoOpSingleStringFunction implements StringFunction { Quarkus also provides support for injecting configuration values using Spring's `@Value` annotation. To see that in action, first edit the `src/main/resources/application.properties` with the following content: -[source,java] +[source,properties] ---- # Your configuration properties greeting.message = hello @@ -275,7 +277,9 @@ You can of course create a native image using instructions similar to link:build Please note that the Spring support in Quarkus does not start a Spring Application Context nor are any Spring infrastructure classes run. Spring classes and annotations are only used for reading metadata and / or are used as user code method return types or parameter types. What that means for end users, is that adding arbitrary Spring libraries will not have any effect. Moreover Spring infrastructure -classes (like `org.springframework.beans.factory.config.BeanPostProcessor` for example) will not be executed. +classes (like `org.springframework.beans.factory.config.BeanPostProcessor` , `org.springframework.context.ApplicationContext` for example) will not be executed. +Regarding the dependency injection in particular, Quarkus uses a Dependency Injection mechanism (called ArC) based on the https://docs.jboss.org/cdi/spec/2.0/cdi-spec.html[Contexts and Dependency Injection for Java 2.0] specification. If you want to learn more about it we recommend you to read the link:cdi[Quarkus introduction to CDI] and the link:cdi-reference#arc-configuration-reference[CDI reference guide] +The various Spring Boot test features are not supported by Quarkus. For testing purposes, please, check the link:getting-started-testing[Quarkus testing guide]. == Conversion Table @@ -319,6 +323,14 @@ The following table shows how Spring DI annotations can be converted to CDI and |@Scope | |Doesn't have a one-to-one mapping to a CDI annotation. Depending on the value of @Scope, one of the @Singleton, @ApplicationScoped, @SessionScoped, @RequestScoped, @Dependent could be used + +|@ComponentScan +| +|Doesn't have a one-to-one mapping to a CDI annotation. It is not used in Quarkus because Quarkus does all classpath scanning at build time. + +|@Import +| +|Doesn't have a one-to-one mapping to a CDI annotation. |=== == More Spring guides @@ -327,6 +339,7 @@ Quarkus has more Spring compatibility features. 
See the following guides for mor * link:spring-web[Quarkus - Extension for Spring Web] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-security[Quarkus - Extension for Spring Security] * link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] diff --git a/docs/src/main/asciidoc/spring-scheduled.adoc b/docs/src/main/asciidoc/spring-scheduled.adoc index f8255d9560d2a..8643c063899ad 100644 --- a/docs/src/main/asciidoc/spring-scheduled.adoc +++ b/docs/src/main/asciidoc/spring-scheduled.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Quarkus Extension for Spring Scheduling API @@ -38,14 +38,14 @@ The solution is located in the `spring-scheduled-quickstart` {quickstarts-tree-u First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=spring-scheduler-quickstart \ -DclassName="org.acme.spring.scheduler.CountResource" \ -Dpath="/count" \ - -Dextensions="spring-scheduled" + -Dextensions="resteasy,spring-scheduled" cd spring-scheduler-quickstart ---- @@ -126,7 +126,7 @@ public class CounterBean { == Updating the application configuration file Edit the `application.properties` file and add the `cron.expr` and the `fixedRate.expr` configuration: -[source,shell] +[source,properties] ---- # The syntax used by Spring for cron expressions is the same as which is used by regular Quarkus scheduler. cron.expr=*/5 * * * * ? @@ -206,7 +206,7 @@ Observe the console to verify that the following messages has been displayed: - `Fixed Rate expression configured in application.properties` These messages indicate that the executions of methods annotated with `@Scheduled` have been triggered. -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also generate the native executable with `./mvnw clean package -Pnative`. == Using Property Expressions @@ -236,6 +236,7 @@ Quarkus has more Spring compatibility features. 
See the following guides for mor * link:spring-di[Quarkus - Extension for Spring DI] * link:spring-web[Quarkus - Extension for Spring Web] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] * link:spring-cache[Quarkus - Extension for Spring Cache] diff --git a/docs/src/main/asciidoc/spring-security.adoc b/docs/src/main/asciidoc/spring-security.adoc index 0f010feef908e..a2e5c2d472bca 100644 --- a/docs/src/main/asciidoc/spring-security.adoc +++ b/docs/src/main/asciidoc/spring-security.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Quarkus Extension for Spring Security API @@ -38,14 +38,14 @@ The solution is located in the `spring-security-quickstart` {quickstarts-tree-ur First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=spring-security-quickstart \ -DclassName="org.acme.spring.security.GreetingController" \ -Dpath="/greeting" \ - -Dextensions="spring-web, spring-security, quarkus-elytron-security-properties-file" + -Dextensions="spring-web,spring-security,quarkus-elytron-security-properties-file" cd spring-security-quickstart ---- @@ -56,7 +56,7 @@ to your project by running the following command in your project base directory: [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="spring-web, spring-security, quarkus-elytron-security-properties-file" +./mvnw quarkus:add-extension -Dextensions="spring-web,spring-security,quarkus-elytron-security-properties-file" ---- This will add the following to your `pom.xml`: @@ -91,7 +91,6 @@ package org.acme.spring.security; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; -import org.springframework.web.bind.annotation.PathVariable; @RestController @RequestMapping("/greeting") @@ -149,10 +148,10 @@ The updated controller will be: ---- package org.acme.spring.security; +import org.springframework.security.access.annotation.Secured; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; -import org.springframework.web.bind.annotation.PathVariable; @RestController @RequestMapping("/greeting") @@ -402,7 +401,7 @@ Some examples of allowed expressions are: @PreAuthorize("hasAnyRole('view1', 'view2') OR isAnonymous() OR hasRole('test')") public void allowedForAdminOrAnonymous() { - + } ---- @@ -435,6 +434,7 @@ Quarkus has more Spring compatibility features. 
See the following guides for mor * link:spring-di[Quarkus - Extension for Spring DI] * link:spring-web[Quarkus - Extension for Spring Web] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] * link:spring-cache[Quarkus - Extension for Spring Cache] diff --git a/docs/src/main/asciidoc/spring-web.adoc b/docs/src/main/asciidoc/spring-web.adoc index c0933cb00c49f..ed61c15a8f9bc 100644 --- a/docs/src/main/asciidoc/spring-web.adoc +++ b/docs/src/main/asciidoc/spring-web.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Quarkus Extension for Spring Web API @@ -37,7 +37,7 @@ The solution is located in the `spring-web-quickstart` {quickstarts-tree-url}/sp First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ @@ -221,7 +221,7 @@ This is enough to generate a basic OpenAPI schema document from your REST Endpoi [source,bash] ---- -curl http://localhost:8080/openapi +curl http://localhost:8080/q/openapi ---- You will see the generated OpenAPI schema document: @@ -255,7 +255,7 @@ paths: "200": description: OK content: - '*/*': + 'application/json': schema: $ref: '#/components/schemas/Greeting' components: @@ -267,11 +267,11 @@ components: type: string ---- -Also see link:https://quarkus.io/guides/openapi-swaggerui[the OpenAPI Guide] +Also see link:openapi-swaggerui[the OpenAPI Guide] === Adding MicroProfile OpenAPI Annotations -You can use link:https://github.com/eclipse/microprofile-open-api[MicroProfile OpenAPI] to better document your schema, +You can use link:https://github.com/eclipse/microprofile-open-api[MicroProfile OpenAPI] to better document your schema, example, adding the following to the class level of the `GreetingController`: [source, java] @@ -286,7 +286,7 @@ example, adding the following to the class level of the `GreetingController`: email = "techsupport@example.com"), license = @License( name = "Apache 2.0", - url = "http://www.apache.org/licenses/LICENSE-2.0.html")) + url = "https://www.apache.org/licenses/LICENSE-2.0.html")) ) ---- @@ -321,7 +321,7 @@ info: email: techsupport@example.com license: name: Apache 2.0 - url: http://www.apache.org/licenses/LICENSE-2.0.html + url: https://www.apache.org/licenses/LICENSE-2.0.html version: 1.0.1 tags: - name: Hello @@ -369,9 +369,9 @@ components: === Using Swagger UI Swagger UI is included by default when running in `Dev` or `Test` mode, and can optionally added to `Prod` mode. -See link:https://quarkus.io/guides/openapi-swaggerui#use-swagger-ui-for-development[the Swagger UI] Guide for more details. +See link:openapi-swaggerui#use-swagger-ui-for-development[the Swagger UI] Guide for more details. 
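For reference, shipping Swagger UI in `Prod` mode usually comes down to a single configuration property; the exact flag is documented in the Swagger UI guide, but it typically looks like this sketch:

[source,properties]
----
# Assumption: also bundles Swagger UI in the production artifact
quarkus.swagger-ui.always-include=true
----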
-Navigate to link:http://localhost:8080/swagger-ui/[localhost:8080/swagger-ui/] and you will see the Swagger UI screen: +Navigate to link:http://localhost:8080/q/swagger-ui/[localhost:8080/q/swagger-ui/] and you will see the Swagger UI screen: image:spring-web-guide-screenshot01.png[alt=Swagger UI] @@ -443,7 +443,7 @@ For this to function however, users need to add the `quarkus-undertow` dependenc The following method return types are supported: * `org.springframework.http.ResponseEntity` -* `java.util.Map` +* `java.util.Map` Other return types mentioned in the Spring `https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/bind/annotation/ExceptionHandler.html[ExceptionHandler javadoc]` are not supported. @@ -453,7 +453,7 @@ The following parameter types are supported, in arbitrary order: * An exception argument: declared as a general `Exception` or as a more specific exception. This also serves as a mapping hint if the annotation itself does not narrow the exception types through its `value()`. * Request and/or response objects (typically from the Servlet API). You may choose any specific request/response type, e.g. `ServletRequest` / `HttpServletRequest`. To use Servlet API, the `quarkus-undertow` dependency needs to be added. - + Other parameter types mentioned in the Spring `https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/bind/annotation/ExceptionHandler.html[ExceptionHandler javadoc]` are not supported. == Important Technical Note @@ -470,7 +470,7 @@ The following table shows how Spring Web annotations can be converted to JAX-RS |=== |Spring |JAX-RS |Comments -|@RequestController +|@RestController | |There is no equivalent in JAX-RS. Annotating a class with @Path suffices @@ -517,6 +517,7 @@ Quarkus has more Spring compatibility features. 
See the following guides for mor * link:spring-di[Quarkus - Extension for Spring DI] * link:spring-data-jpa[Quarkus - Extension for Spring Data JPA] +* link:spring-data-rest[Quarkus - Extension for Spring Data REST] * link:spring-security[Quarkus - Extension for Spring Security] * link:spring-cloud-config-client[Quarkus - Reading properties from Spring Cloud Config Server] * link:spring-boot-properties[Quarkus - Extension for Spring Boot properties] diff --git a/docs/src/main/asciidoc/tests-with-coverage.adoc b/docs/src/main/asciidoc/tests-with-coverage.adoc index fb46d008fb54a..9ddcc097ea9d2 100644 --- a/docs/src/main/asciidoc/tests-with-coverage.adoc +++ b/docs/src/main/asciidoc/tests-with-coverage.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Measuring the coverage of your tests @@ -35,7 +35,7 @@ To complete this guide, you need: * an IDE * JDK 1.8+ installed with JAVA_HOME configured appropriately * Apache Maven {maven-version} -* Having completed the link:https://quarkus.io/guides/getting-started-testing[Testing your application guide] +* Having completed the link:getting-started-testing[Testing your application guide] == Architecture @@ -53,7 +53,7 @@ The solution is located in the `tests-with-coverage-quickstart` {quickstarts-tre Let's start from an empty application created with the Quarkus Maven plugin: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:create \ -DprojectGroupId=org.acme \ @@ -119,26 +119,7 @@ public class GreetingService { } ---- -The project will also need some tests. First a simple JUnit: - -[source,java] ----- -package org.acme.testcoverage; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class GreetingServiceTest { - - @Test - public void testGreetingService() { - GreetingService service = new GreetingService(); - Assertions.assertEquals("hello Quarkus", service.greeting("Quarkus")); - } -} ----- - -But also a `@QuarkusTest`: +The project will also need a test: [source,java] ---- @@ -154,7 +135,6 @@ import static io.restassured.RestAssured.given; import static org.hamcrest.CoreMatchers.is; @QuarkusTest -@Tag("integration") public class GreetingResourceTest { @Test @@ -179,283 +159,56 @@ public class GreetingResourceTest { } ---- -The first one will be our example of a Unit Test and the second one will be our example of Integration Test. +== Setting up Jacoco -== Separating executions of Unit Tests and Integration Tests +Now we need to add Jacoco to our project. To do this we need to add the following to the `pom.xml` dependencies section: -You may want to consider that JUnits and QuarkusTests are two different kind of tests and that they deserve to be separated. This way you could run them separately, in different cases or some more often than the others. -In order to do so, we'll use a feature of JUnit 5 that allows us to tag some tests. Let's tag `GreetingResourceTest.java` and specify that it is an Integration Test: - -[source,java] +[source,xml] ---- -import org.junit.jupiter.api.Tag; -... - -@QuarkusTest -@Tag("integration") -public class GreetingResourceTest { - ... -} + + io.quarkus + quarkus-jacoco + test + ---- -We're now able to distinguish unit tests and integration tests. 
Now, let's bind them to different Maven lifecycle phases. Let's use surefire to bind unit tests to the *test* phase and the integration tests to the *integration-test* phase. +This Quarkus extension takes care of everything that would usually be done via the Jacoco maven plugin, so no additional +config is required. -[source,xml,subs=attributes+] ----- - - ... - - - - maven-surefire-plugin - ${surefire-plugin.version} - - integration - - org.jboss.logmanager.LogManager - ${maven.home} - - - - - integration-tests - integration-test - - test - - - !integration - integration - - - - - - - ... - ----- - -NOTE: This way, the `QuarkusTest` instances will be executed as part of the `integration-test` build phase while the other JUnit tests will still be ran during the `test` phase. -You can run all the tests with the command `./mvnw clean verify` (and you will notice that two tests are ran in different phases). +== Running the tests with coverage -== Measuring the coverage of JUnit tests using JaCoCo +Run `mvn verify`, the tests will be run and the results will end up in `target/jacoco-reports`. This is all that is needed, +the `quarkus-jacoco` extension allows Jacoco to just work out of the box. -It is now time to introduce JaCoCo to measure the coverage. The straightforward way to add JaCoCo to your build is to reference the plugin in your `pom.xml`. +There are some config options that affect this: -[source,xml,subs=attributes+] ----- - - ... - 0.8.4 - +include::{generated-dir}/config/quarkus-jacoco-jacoco-config.adoc[opts=optional, leveloffset=+1] - - - ... - - org.jacoco - jacoco-maven-plugin - ${jacoco.version} - - - default-prepare-agent - - prepare-agent - - - - default-report - - report - - - ${project.build.directory}/jacoco.exec - ${project.reporting.outputDirectory}/jacoco - - - - - - ----- +== Coverage for tests not using @QuarkusTest -NOTE: If you run `./mvnw clean test` the coverage information will be collected during the execution of the unit tests in the file `jacoco.exec`. +The Quarkus automatic Jacoco config will only work for tests that are annotated with `@QuarkusTest`. If you want to check +the coverage of other tests as well then you will need to fall back to the Jacoco maven plugin. -== Measuring separately the coverage of each test type +Because Quarkus uses class file transformation it is not possible to use online transformation with the Jacoco agent. +Instead we need to use offline transformation. -It is not strictly necessary, but let's distinguish the coverage brought by each test type. To do so, we`ll just output the coverage info in two different files, one in `jacoco-ut.exec` and one in `jacoco-it.exec`. -We also need to generate a separate report for each test execution. Let's adjust the Jacoco configuration for that: - -[source,xml,subs=attributes+] ----- - - ... - 0.8.4 - - - - - ... 
- - org.jacoco - jacoco-maven-plugin - ${jacoco.version} - - - prepare-agent-ut - - prepare-agent - - - ${project.build.directory}/jacoco-ut.exec - - - - prepare-agent-it - pre-integration-test - - prepare-agent - - - ${project.build.directory}/jacoco-it.exec - - - - report-ut - - report - - - ${project.build.directory}/jacoco-ut.exec - ${project.reporting.outputDirectory}/jacoco-ut - - - - report-it - post-integration-test - - report - - - ${project.build.directory}/jacoco-it.exec - ${project.reporting.outputDirectory}/jacoco-it - - - - - - ----- - -== The coverage does not seem to correspond to the reality - -You can now run the tests: `./mvnw clean verify` As explained earlier, it will run the unit tests first, then the integration tests. And finally, it will generate two separate reports. First a report of the coverage of the unit tests in `target/site/jacoco-ut` then a report of the coverage of the integration tests in `target/site/jacoco-it`. - -Given the content of `GreetingResourceTest`, `GreetingResource` should have been covered. But when we open the report `target/site/jacoco-it/index.html`, the class `GreetingResource` is reported with 0% of coverage. But the fact that `GreetingService` is reported as covered shows that the test execution was actually recorded. How come? - -During the report generation, you may have noticed a warning: - -[source,shell,subs=attributes+] ----- -[WARNING] Classes in bundle '***' do no match with execution data. For report generation the same class files must be used as at runtime. -[WARNING] Execution data for class org/acme/testcoverage/GreetingResource does not match. ----- - -It seems that Quarkus and JaCoCo step on each other's toes. What happens is that Quarkus transforms the JAX-RS resources (and also the Panache files). -You may have noticed that `GreetingResource` was not written in the simplest way like: - -[source,java] ----- -... -@Path("/hello") -public class GreetingResource { - - @Inject - GreetingService service; - - @GET - @Produces(MediaType.TEXT_PLAIN) - @Path("/greeting/{name}") - public String greeting(@PathParam("name") String name) { - return service.greeting(name); - } - - @GET - @Produces(MediaType.TEXT_PLAIN) - public String hello() { - return "hello"; - } -} ----- - -Above, the constructor is implicit and we use injection to have an instance of `GreetingService`. Note that, with this code relying on an implicit constructor, the coverage would have been reported properly by JaCoCo. -Instead, we introduced a constructor based injection: - -[source,java] ----- -... -@Path("/hello") -public class GreetingResource { - - private final GreetingService service; - - @Inject - public GreetingResource(GreetingService service) { - this.service = service; - } -... -} ----- - -Some might say that this approach is preferable since the field can be *final* like this. Anyway, in some cases you might need an explicit constructor. And, in that case, the coverage is not reported properly by JaCoCo. -This is because Quarkus generates a constructor without any parameter and does some bycode manipulations in order to add it to the class. That is what happened here, just before the execution of the integration tests: - -[source,shell,subs=attributes+] ----- -[INFO] --- quarkus-maven-plugin:0.16.0:build (default) @ getting-started-testing --- -[INFO] [io.quarkus.deployment.QuarkusAugmentor] Beginning quarkus augmentation -... ----- - -As a consequence, JaCoCo does not recognize the classes when it wants to create its report. But wait... 
there is a solution. - -== Instrumenting the classes instead - -JaCoCo has two modes. The first one is based on an agent and instruments classes on-the-fly. Unfortunately, this is incompatible with the dynamic classfile transformations that Quarkus does. The second mode is called link:https://www.eclemma.org/jacoco/trunk/doc/offline.html[offline instrumentation]. Classes are pre-instrumented in advance via the *jacoco:instrument* Maven goal and during their usage (when the tests are ran), *jacocoagent.jar* must be added to the classpath. -Once the tests have been executed, it is recommended to restore the original classes using the *jacoco:restore-instrumented-classes* Maven goal. - -Let's first add the dependency on *jacocoagent.jar*: +Instead of including the `quarkus-jacoco` extension in your pom you will need the following config: [source,xml,subs=attributes+] ---- ... - ... + ... - org.jacoco - org.jacoco.agent - runtime - test - ${jacoco.version} + org.jacoco + org.jacoco.agent + runtime + test + ${jacoco.version} - - ----- - -Then let's configure three jacoco plugin goals for unit tests: - -* One to instrument the classes during the *process-classes* phase -* One to restore the original classes during the *prepare-package* phase (after the tests are ran) -* One to generate the report during the *verify* phase (the report generation requires the original classes to have been restored) - -and a similar setup for the integration tests too: - -[source,xml,subs=attributes+] ----- - ... +
    ... @@ -482,33 +235,7 @@ and a similar setup for the integration tests too: report - ${project.build.directory}/jacoco-ut.exec - ${project.reporting.outputDirectory}/jacoco-ut - - - - instrument-it - pre-integration-test - - instrument - - - - restore-it - post-integration-test - - restore-instrumented-classes - - - - report-it - post-integration-test - - report - - - ${project.build.directory}/jacoco-it.exec - ${project.reporting.outputDirectory}/jacoco-it + ${project.reporting.outputDirectory}/jacoco-reports @@ -531,178 +258,19 @@ It also requires a small change in the Surefire configuration. Note below that w maven-surefire-plugin ${surefire-plugin.version} - integration - ${project.build.directory}/jacoco-ut.exec + ${project.build.directory}/jacoco.exec org.jboss.logmanager.LogManager ${maven.home} - - - integration-tests - integration-test - - test - - - !integration - integration - - ${project.build.directory}/jacoco-it.exec - - - -
    ---- -Let's now check the generated report that can be found in `target/site/jacoco-it/index.html`. The report now shows that `GreetingResource` is actually properly covered! Yay! - -== Bonus: Building a consolidated report for Unit Tests and Integration Tests - -So, finally, let's improve the setup even further and let's merge the two execution files (*jacoco-ut.exec* and *jacoco-it.exec*) into one consolidated report and generate a consolidated report that will show the coverage of all your tests combined. - -You should end up with something like this (note the addition of the `merge-results` and `post-merge-report` executions): - -[source,xml,subs=attributes+] ----- - - ... - - - - maven-surefire-plugin - ${surefire-plugin.version} - - integration - - ${project.build.directory}/jacoco-ut.exec - org.jboss.logmanager.LogManager - ${maven.home} - - - - - integration-tests - integration-test - - test - - - !integration - integration - - ${project.build.directory}/jacoco-it.exec - - - - - - ... - - org.jacoco - jacoco-maven-plugin - ${jacoco.version} - - - instrument-ut - - instrument - - - - restore-ut - - restore-instrumented-classes - - - - report-ut - - report - - - ${project.build.directory}/jacoco-ut.exec - ${project.reporting.outputDirectory}/jacoco-ut - - - - instrument-it - pre-integration-test - - instrument - - - - restore-it - post-integration-test - - restore-instrumented-classes - - - - report-it - post-integration-test - - report - - - ${project.build.directory}/jacoco-it.exec - ${project.reporting.outputDirectory}/jacoco-it - - - - merge-results - verify - - merge - - - - - ${project.build.directory} - - *.exec - - - - ${project.build.directory}/jacoco.exec - - - - post-merge-report - verify - - report - - - ${project.build.directory}/jacoco.exec - ${project.reporting.outputDirectory}/jacoco - - - - - - - ... - - ... - - org.jacoco - org.jacoco.agent - runtime - test - ${jacoco.version} - - - ----- - == Conclusion You now have all the information you need to study the coverage of your tests! -But remember, some code that is not covered is certinaly not well tested. But some code that is covered is not necessarily *well* tested. Make sure to write good tests! +But remember, some code that is not covered is certainly not well tested. But some code that is covered is not necessarily *well* tested. Make sure to write good tests! 
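For clarity, the `quarkus-jacoco` test dependency added in the "Setting up Jacoco" section corresponds to the following Maven coordinates (the version is assumed to be managed by the Quarkus BOM):

[source,xml]
----
<dependency>
    <groupId>io.quarkus</groupId>
    <artifactId>quarkus-jacoco</artifactId>
    <scope>test</scope>
</dependency>
----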
diff --git a/docs/src/main/asciidoc/tika.adoc b/docs/src/main/asciidoc/tika.adoc index b6c7f529e71ea..41bcd48ea5893 100644 --- a/docs/src/main/asciidoc/tika.adoc +++ b/docs/src/main/asciidoc/tika.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Apache Tika @@ -13,25 +13,13 @@ https://tika.apache.org/[Apache Tika] is a content analysis toolkit which is use [NOTE] ==== -If you are planning to run the application as a native executable and parse documents that may have been created with charsets different than the standard ones supported in Java such as `UTF-8` then you should configure Quarkus Maven Plugin to get the native image generator include all the charsets available to the JVM: +If you are planning to run the application as a native executable and parse documents that may have been created with charsets different than the standard ones supported in Java such as `UTF-8` then you should configure Quarkus to get the native image generator include all the charsets available to the JVM: [source,xml] ---- - - io.quarkus - quarkus-maven-plugin - - - native-image - - native-image - - - true - ... - - - - + + native + true + ---- ==== @@ -66,7 +54,7 @@ The provided solution contains a few additional elements such as tests and testi First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme.example \ @@ -178,7 +166,7 @@ As you can see the JAX-RS resource method was renamed to `extractText`, `@GET` a Now we are ready to run our application. Use: -[source,shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -192,7 +180,7 @@ $ ./mvnw clean compile quarkus:dev [INFO] Scanning for projects... [INFO] INFO] --------------------< org.acme.example:apache-tika >-------------------- -[INFO] Building apache-tika 1.0-SNAPSHOT +[INFO] Building apache-tika 1.0.0-SNAPSHOT [INFO] --------------------------------[ jar ]--------------------------------- ... Listening for transport dt_socket at address: 5005 @@ -224,7 +212,7 @@ Hello Quarkus == Building a native executable You can build a native executable with the usual command `./mvnw package -Pnative`. -Running it is as simple as executing `./target/tika-quickstart-1.0-SNAPSHOT-runner`. +Running it is as simple as executing `./target/tika-quickstart-1.0.0-SNAPSHOT-runner`. == Configuration Reference diff --git a/docs/src/main/asciidoc/tooling.adoc b/docs/src/main/asciidoc/tooling.adoc index 13d062d986600..4d3d7478db241 100644 --- a/docs/src/main/asciidoc/tooling.adoc +++ b/docs/src/main/asciidoc/tooling.adoc @@ -1,31 +1,34 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using our Tooling include::./attributes.adoc[] -Quarkus comes with a toolchain enabling developers from live reload all the way down to deploying a Kubernetes application. +Quarkus comes with a toolchain enabling developers from live reload all the way down to deploying a Kubernetes application. 
In addition there are plugins and extensions to all major IDEs. + In this guide, we will explore: * how to use link:maven-tooling[Maven] as a build tool * how to use link:gradle-tooling[Gradle] as a build tool -* how to use the native CLI for your toolchain (coming soon) +* how to use the CLI for your toolchain (coming soon) * how to create and scaffold a new project * how to deal with extensions * how to enable live reload * how to develop your application in your IDE * how to compile your application natively +* how to setup Quarkus tools in link:ide-tooling[Visual Studio Code, Eclipse IDE, Eclipse Che and IntelliJ] [[build-tool]] == Choosing your build tool Quarkus comes with a toolchain to help you at all development stages. You can use Maven or Gradle as build tool. -And we offer a native CLI that is convenient to use (coming soon). +And we offer a CLI that is convenient to use (coming soon). * link:maven-tooling[Maven] * link:gradle-tooling[Gradle] //* link:cli-tooling[CLI] +* link:ide-tooling[IDE] diff --git a/docs/src/main/asciidoc/transaction.adoc b/docs/src/main/asciidoc/transaction.adoc index 1dc455cb2c693..015779dbb2fe9 100644 --- a/docs/src/main/asciidoc/transaction.adoc +++ b/docs/src/main/asciidoc/transaction.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Using Transactions in Quarkus @@ -206,7 +206,7 @@ Does it work everywhere I want to?:: Yep, it works in your Quarkus application, in your IDE, in your tests, because all of these are Quarkus applications. JTA has some bad press for some people. I don't know why. -Let's just say that this is not your grand'pa's JTA implementation. +Let's just say that this is not your grandpa's JTA implementation. What we have is perfectly embeddable and lean. Does it do 2 Phase Commit and slow down my app?:: diff --git a/docs/src/main/asciidoc/validation.adoc b/docs/src/main/asciidoc/validation.adoc index 54a5fbcce5373..f758f52900111 100644 --- a/docs/src/main/asciidoc/validation.adoc +++ b/docs/src/main/asciidoc/validation.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Validation with Hibernate Validator @@ -42,18 +42,18 @@ The solution is located in the `validation-quickstart` {quickstarts-tree-url}/va First, we need a new project. Create a new project with the following command: -[source, subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=validation-quickstart \ -DclassName="org.acme.validation.BookResource" \ -Dpath="/books" \ - -Dextensions="resteasy-jsonb, hibernate-validator" + -Dextensions="resteasy,resteasy-jackson,hibernate-validator" cd validation-quickstart ---- -This command generates a Maven structure importing the RESTEasy/JAX-RS, JSON-B and Hibernate Validator/Bean Validation extensions. +This command generates a Maven structure importing the RESTEasy/JAX-RS, Jackson and Hibernate Validator/Bean Validation extensions. 
If you already have your Quarkus project configured, you can add the `hibernate-validator` extension to your project by running the following command in your project base directory: @@ -122,8 +122,6 @@ Add the following method: ---- @Path("/manual-validation") @POST -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public Result tryMeManualValidation(Book book) { Set> violations = validator.validate(book); if (violations.isEmpty()) { @@ -233,8 +231,6 @@ Calling the service in your rest end point triggers the `Book` validation automa @Path("/service-method-validation") @POST -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) public Result tryMeServiceMethodValidation(Book book) { try { bookService.validateBook(book); @@ -273,7 +269,7 @@ Then, open your browser to http://localhost:8080/: image:validation-guide-screenshot.png[alt=Application] -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also build the native executable using `./mvnw package -Pnative`. == Going further @@ -363,6 +359,76 @@ provided the supported locales have been properly specified in the `application. quarkus.locales=en-US,es-ES,fr-FR ---- +=== Validation groups for REST endpoint or service method validation + +It's sometimes necessary to enable different validation constraints +for the same class when it's passed to a different method. + +For example, a `Book` may need to have a `null` identifier when passed to the `put` method +(because the identifier will be generated), +but a non-`null` identifier when passed to the `post` method +(because the method needs the identifier to know what to update). + +To address this, you can take advantage of validation groups. +Validation groups are markers that you put on your constraints in order to enable or disable them at will. + +First, define the `Put` and `Post` groups, which are just Java interfaces. + +[source, java] +---- +public interface ValidationGroups { + interface Put extends Default { // <1> + } + interface Post extends Default { // <1> + } +} +---- +<1> Make the custom groups extend the `Default` group. +This means that whenever these groups are enabled, the `Default` group is also enabled. +This is useful if you have constraints that you want validated in both the `Put` and `Post` method: +you can simply use the default group on those constraints, like on the `title` property below. + +Then add the relevant constraints to `Book`, assigning the right group to each constraint: + +[source, java] +---- +public class Book { + + @Null(groups = ValidationGroups.Put.class) + @NotNull(groups = ValidationGroups.Post.class) + public Long id; + + @NotBlank + public String title; + +} +---- + +Finally, add a `@ConvertGroup` annotation next to your `@Valid` annotation in your validated method. + +[source, java] +---- +@Path("/") +@PUT +@Consumes(MediaType.APPLICATION_JSON) +public void put(@Valid @ConvertGroup(to = ValidationGroups.Put.class) Book book) { // <1> + // ... +} + +@Path("/") +@POST +@Consumes(MediaType.APPLICATION_JSON) +public void post(@Valid @ConvertGroup(to = ValidationGroups.Post.class) Book book) { // <2> + // ... +} +---- +<1> Enable the `Put` group, meaning only constraints assigned to the `Put` (and `Default`) groups +will be validated for the `book` parameter of the `put` method. 
+In this case, it means `Book.id` must be `null` and `Book.title` must not be blank. +<2> Enable the `Post` group, meaning only constraints assigned to the `Post` (and `Default`) groups +will be validated for the `book` parameter of the `post` method. +In this case, it means `Book.id` must not be `null` and `Book.title` must not be blank. + [[configuration-reference]] == Hibernate Validator Configuration Reference diff --git a/docs/src/main/asciidoc/vault-auth.adoc b/docs/src/main/asciidoc/vault-auth.adoc index 75814d457da2c..223f6648bcdd4 100644 --- a/docs/src/main/asciidoc/vault-auth.adoc +++ b/docs/src/main/asciidoc/vault-auth.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Working with HashiCorp Vault’s Authentication @@ -51,14 +51,14 @@ To complete this guide, you need: We assume there is a Vault container and a PostgreSQL container running from the {base-guide}, and the root token is known. First, create a new shell, `docker exec` into the container and set the root token: -[source, shell, subs=attributes+] +[source,shell, subs=attributes+] ---- docker exec -it dev-vault sh / # export VAULT_TOKEN={root-token} ---- Create a token for the `vault-quickstart-policy` policy: -[source, shell, subs=attributes+] +[source,shell, subs=attributes+] ---- / # vault token create -policy=vault-quickstart-policy Key Value @@ -79,14 +79,14 @@ quarkus.vault.authentication.client-token={client-token} ---- Compile and start the application: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- ./mvnw clean install -java -jar target/vault-quickstart-1.0-SNAPSHOT-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- Finally test the application endpoint: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- curl http://localhost:8080/hello/private-key ---- @@ -111,7 +111,7 @@ If the _Wrapping Token_ gets stolen and unwrapped, we will notice immediately be will get an error saying that the token is invalid. With that in mind, let’s create a new token and wrap it inside a _Wrapping Token_ with a TTL of 1 minute: -[source, shell, subs=attributes+] +[source,shell, subs=attributes+] ---- / # vault token create -wrap-ttl=60s -policy=vault-quickstart-policy Key Value @@ -133,7 +133,7 @@ quarkus.vault.authentication.client-token-wrapping-token={client-token-wrapping- Compile and run the application *without the tests*, you should be able now to curl the private key `{private-key}` as before. Stop the application, and execute tests with `./mvnw test`. They should fail with the following error: -[source, shell, subs=attributes+] +[source,text, subs=attributes+] ---- ERROR: Failed to start application io.quarkus.vault.VaultException: wrapping token is not valid or does not exist; this means that the token has already expired (if so you can increase the TTL on the wrapping token) or has been consumed by somebody else (potentially indicating that the wrapping token has been stolen) @@ -143,7 +143,7 @@ io.quarkus.vault.VaultException: wrapping token is not valid or does not exist; Normally the `userpass` auth method should already be enabled from the {base-guide}. 
If not, execute the following commands now: -[source, shell] +[source,bash] ---- vault auth enable userpass vault write auth/userpass/users/bob password=sinclair policies=vault-quickstart-policy @@ -157,7 +157,7 @@ quarkus.vault.authentication.userpass.password=sinclair ---- Test the application endpoint after compiling and starting the application again: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- curl http://localhost:8080/hello/private-key ---- @@ -180,7 +180,7 @@ _Approle_ is an authentication method suited for technical workflows. It relies To set up _Approle_ you need to enable the `approle` auth method, create an app role, and generate a role id and secret id: -[source, shell, subs=attributes+] +[source,shell, subs=attributes+] ---- / # vault auth enable approle / # vault write auth/approle/role/myapprole policies=vault-quickstart-policy @@ -210,7 +210,7 @@ After compiling and running the application you should be able to curl it on the === Approle using Response Wrapping Similarly to direct client token authentication, it is possible to wrap the `secret-id`: -[source, shell, subs=attributes+] +[source,shell, subs=attributes+] ---- / # vault write -wrap-ttl=60s -f auth/approle/role/myapprole/secret-id Key Value @@ -271,7 +271,7 @@ And apply it: `kubectl apply -f vault-auth-k8s.yml`. Once the objects are created, we need to capture the JWT token of this service account, and grab the public certificate of the cluster: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- secret_name=$(kubectl get sa vault-auth-sa -o json | jq -r '.secrets[0].name') token=$(kubectl get secret $secret_name -o json | jq -r '.data.token' | base64 --decode) @@ -283,7 +283,7 @@ kubectl get secret $secret_name -o json | jq -r '.data."ca.crt"' | base64 -D > / The next step requires to exec interactively with the root token into the Vault container to configure the Kubernetes auth method: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- docker exec -it dev-vault sh export VAULT_TOKEN={root-token} @@ -292,7 +292,7 @@ export VAULT_TOKEN={root-token} Once inside the pod, set the token variable to the value that was just printed in the console before, and recreate `ca.crt` with the same content as `/tmp/ca.crt` outside the container. Finally use `kubectl config view` to assess the url of your Kubernetes cluster: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- token=... => set the value printed in the console just before vi ca.crt => copy/paste /tmp/ca.crt from outside the container @@ -300,7 +300,7 @@ kubernetes_host => url from the kubectl config view (e.g. 
https://kubernetes.doc ---- Now we have all the information we need to configure Vault: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- vault auth enable kubernetes @@ -327,7 +327,10 @@ such that they will be accessible from the pod): [source, properties, subs=attributes+] ---- quarkus.vault.url=http://:8200 -quarkus.datasource.url = jdbc:postgresql://:5432/mydatabase +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username= +quarkus.datasource.password= +quarkus.datasource.jdbc.url = jdbc:postgresql://:5432/mydatabase quarkus.vault.authentication.kubernetes.role=vault-quickstart-role @@ -335,7 +338,7 @@ quarkus.log.category."io.quarkus.vault".level=DEBUG ---- Now build the application: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- ./mvnw package -DskipTests docker build -f src/main/docker/Dockerfile.jvm -t quarkus/vault-quickstart-jvm . @@ -397,7 +400,7 @@ And apply it: `kubectl apply -f vault-quickstart-k8s.yml`. This will deploy the application, and make it available on port `30400` of the Kubernetes host. You can check that the application has started from the logs: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- kubectl get pods pod=$(kubectl get pod -l app=vaultapp -o json | jq -r '.items[0].metadata.name') @@ -405,7 +408,7 @@ kubectl logs $pod ---- You should see: -[source, shell, subs=attributes+] +[source,text, subs=attributes+] ---- exec java -Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager -XX:+UseParallelGC -XX:GCTimeRatio=4 -XX:AdaptiveSizePolicyWeight=90 -XX:MinHeapFreeRatio=20 -XX:MaxHeapFreeRatio=40 -XX:+ExitOnOutOfMemoryError -cp . -jar /deployments/app.jar __ ____ __ _____ ___ __ ____ ______ @@ -419,21 +422,21 @@ __ ____ __ _____ ___ __ ____ ______ 2020-04-15 18:30:01,779 DEBUG [io.qua.vau.run.VaultAuthManager] (main) created new login token: {clientToken: ***, renewable: true, leaseDuration: 3600s, valid_until: Wed Apr 15 19:30:01 GMT 2020} 2020-04-15 18:30:01,802 DEBUG [io.qua.vau.run.con.VaultConfigSource] (main) loaded 1 properties from vault 2020-04-15 18:30:02,722 DEBUG [io.qua.vau.run.VaultAuthManager] (Agroal_7305849841) extended login token: {clientToken: ***, renewable: true, leaseDuration: 3600s, valid_until: Wed Apr 15 19:30:02 GMT 2020} -2020-04-15 18:30:03,274 INFO [io.quarkus] (main) vault-quickstart 1.0-SNAPSHOT (powered by Quarkus 999-SNAPSHOT) started in 4.255s. Listening on: http://0.0.0.0:8080 +2020-04-15 18:30:03,274 INFO [io.quarkus] (main) vault-quickstart 1.0.0-SNAPSHOT (powered by Quarkus 999-SNAPSHOT) started in 4.255s. Listening on: http://0.0.0.0:8080 2020-04-15 18:30:03,276 INFO [io.quarkus] (main) Profile prod activated. 2020-04-15 18:30:03,276 INFO [io.quarkus] (main) Installed features: [agroal, cdi, hibernate-orm, jdbc-postgresql, narayana-jta, resteasy, vault] ---- Notice in particular the following log line: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- authenticate with jwt at: /var/run/secrets/kubernetes.io/serviceaccount/token => *** ---- Finally curl the `private-key` endpoint to make sure you can retrieve your secret: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- curl http://localhost:30400/hello/private-key ---- -You should see `{private-key}`. \ No newline at end of file +You should see `{private-key}`. 
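For completeness, the `vault-quickstart-role` referenced in the configuration above is bound on the Vault side to the `vault-auth-sa` service account; the command typically looks like the following sketch (the namespace and TTL values are assumptions to adapt to your cluster):

[source,bash]
----
vault write auth/kubernetes/role/vault-quickstart-role \
    bound_service_account_names=vault-auth-sa \
    bound_service_account_namespaces=default \
    policies=vault-quickstart-policy \
    ttl=1h
----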
diff --git a/docs/src/main/asciidoc/vault-datasource.adoc b/docs/src/main/asciidoc/vault-datasource.adoc index bd7f5ef894d68..b353510284700 100644 --- a/docs/src/main/asciidoc/vault-datasource.adoc +++ b/docs/src/main/asciidoc/vault-datasource.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using HashiCorp Vault with Databases @@ -109,7 +109,7 @@ public int geGiftCount() { Start a PostgreSQL database: -[source, shell] +[source,bash] ---- docker run --ulimit memlock=-1:-1 -it --rm=true --memory-swappiness=0 --name postgres-quarkus-hibernate -e POSTGRES_USER=sarah -e POSTGRES_PASSWORD=connor -e POSTGRES_DB=mydatabase -p 5432:5432 postgres:10.5 ---- @@ -122,7 +122,7 @@ The simplest approach is to write the database password in the KV secret engine fetched from the Vault MicroProfile Config Source. Open a new shell, `docker exec` in the Vault container and set the root token: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- docker exec -it dev-vault sh export VAULT_TOKEN={root-token} @@ -130,7 +130,7 @@ export VAULT_TOKEN={root-token} Add a `dbpassword` property in the `config` path of the KV secret engine, beside the original `a-private-key` property: -[source, shell] +[source,bash] ---- vault kv put secret/myapps/vault-quickstart/config a-private-key=123456 dbpassword=connor ---- @@ -149,22 +149,22 @@ quarkus.hibernate-orm.database.generation=drop-and-create ---- Compile and start the application: -[source, shell] +[source,bash] ---- ./mvnw package -java -jar target/vault-quickstart-1.0-SNAPSHOT-runner.jar +java -jar target/quarkus-app/quarkus-run.jar ---- Test it with the `gift-count` endpoint: -[source, shell] +[source,bash] ---- curl http://localhost:8080/hello/gift-count ---- You should see: -[source, shell] +[source,text] ---- 0 ---- @@ -187,7 +187,7 @@ we make sure we have a fresh value every time. Create a new path (different than the `config` path) in Vault where the database password will be added: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- vault kv put secret/myapps/vault-quickstart/db password=connor ---- @@ -234,7 +234,7 @@ of databases are supported, such as https://www.vaultproject.io/docs/secrets/dat First we need to enable the `database` secret engine, configure the `postgresql-database-plugin` and create the database role `mydbrole` (replace `10.0.0.3` by the actual host that is running the PostgreSQL container; for simplicity we disabled _TLS_ between Vault and the PostgreSQL database): -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- vault secrets enable database @@ -266,7 +266,7 @@ to create users. ==== Then we need to give a read capability to the Quarkus application on path `database/creds/mydbrole`. 
-[source, shell] +[source,bash] ---- cat < vault:v1:fN4P7WNjIegpb3lD/pSuhXvyONhGrI21gcKNcedk+5jpjguOw6JkqXYXlkY= ---- And decrypt back this cipher text: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- curl -X POST --data 'vault:v1:fN4P7WNjIegpb3lD/pSuhXvyONhGrI21gcKNcedk+5jpjguOw6JkqXYXlkY=' --header "Content-Type: text/plain" http://localhost:8080/transit/decrypt # ==> some secret data @@ -238,14 +238,14 @@ public Response verify(VerifyRequest request) { ---- And start signing some data: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- curl -X POST --data 'some secret data' --header "Content-Type: text/plain" http://localhost:8080/transit/sign # ==> vault:v1:MEUCIQDl+nE4y4E878bkugGG6FG1/RsttaQnoWfZHppeuk4TnQIgTGWTtMhVPCzN8VH/EEr2qp5h34lI1bnEP6L1F+QQoPI= ---- And finally, let's make sure the signature is matching our input data: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- curl -v -X POST --data '{"input":"some secret data","signature":"vault:v1:MEUCIQDl+nE4y4E878bkugGG6FG1/RsttaQnoWfZHppeuk4TnQIgTGWTtMhVPCzN8VH/EEr2qp5h34lI1bnEP6L1F+QQoPI="}' --header "Content-Type: application/json" http://localhost:8080/transit/verify # ==> ... < HTTP/1.1 202 Accepted diff --git a/docs/src/main/asciidoc/vault.adoc b/docs/src/main/asciidoc/vault.adoc index d006d31a8db97..5c383210e7e01 100644 --- a/docs/src/main/asciidoc/vault.adoc +++ b/docs/src/main/asciidoc/vault.adoc @@ -1,13 +1,13 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using HashiCorp Vault include::./attributes.adoc[] :config-file: application.properties -:vault-version: 1.2.2 +:vault-version: 1.6.0 :root-token: s.5VUS8pte13RqekCB2fmMT3u2 :client-token: s.s93BVzJPzBiIGuYJHBTkG8Uw :extension-status: preview @@ -47,20 +47,20 @@ To complete this guide, you need: Let's start Vault in development mode: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- docker run --rm --cap-add=IPC_LOCK -e VAULT_ADDR=http://localhost:8200 -p 8200:8200 -d --name=dev-vault vault:{vault-version} ---- You can check that vault is running with: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- docker logs dev-vault ---- You should see: -[source, shell, subs=attributes+] +[source,text, subs=attributes+] ---- ==> Vault server configuration: @@ -115,21 +115,21 @@ to access a secret stored in the _kv secret engine_. 
First open a shell inside the vault container: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- docker exec -it dev-vault sh ---- Set the `VAULT_TOKEN` with the value that was printed in the logs: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- export VAULT_TOKEN={root-token} ---- You can check Vault's status using the CLI command `vault status`: -[source, shell] +[source,text] ---- Key Value --- ----- @@ -138,7 +138,7 @@ Initialized true Sealed false Total Shares 1 Threshold 1 -Version 1.2.2 +Version 1.6.0 Cluster Name vault-cluster-b07e80d8 Cluster ID 55bd74b6-eaaf-3862-f7ce-3473ab86c57f HA Enabled false @@ -146,7 +146,7 @@ HA Enabled false Now let's add a secret configuration for our application: -[source, shell] +[source,bash] ---- vault kv put secret/myapps/vault-quickstart/config a-private-key=123456 ---- @@ -155,7 +155,7 @@ We have defined a path `secret/myapps/vault-quickstart` in Vault that we need to Create a policy that gives read access to `secret/myapps/vault-quickstart` and subpaths: -[source, shell] +[source,bash] ---- cat < If you are using the `quarkus-smallrye-health` extension, `quarkus-vault` can add a readiness health check to validate the connection to the Vault server. This is disabled by default. -If enabled, when you access the `/health/ready` endpoint of your application you will have information about the connection validation status. +If enabled, when you access the `/q/health/ready` endpoint of your application you will have information about the connection validation status. This behavior can be enabled by setting the `quarkus.vault.health.enabled` property to `true` in your `application.properties`. @@ -446,12 +453,13 @@ discouraged in production as it is not more secure than talking to Vault in plai == Vault Provisioning Beside the typical client use cases, the Quarkus extension can be used to provision Vault as well, -for instance as part of a CD pipeline. Specific CDI beans support this scenario: +for instance as part of a CD pipeline. Specific CDI beans and operations support this scenario: * `VaultSystemBackendEngine`: create Vault Policies. See the https://www.vaultproject.io/api-docs/system/policy[Vault documentation]. * `VaultKubernetesAuthService`. See the https://www.vaultproject.io/api-docs/auth/kubernetes[Vault documentation]. ** Configure the Kubernetes Auth Method (Kubernetes host, certificates, keys, ...) ** Create Kubernetes Auth Roles (association between Kubernetes service accounts, Kubernetes namespaces and Vault policies) +* `VaultTransitSecretEngine`: _CRUD_ operations on keys. See the https://www.vaultproject.io/api-docs/secret/transit[Vault documentation]. For instance: @@ -512,4 +520,9 @@ credentials), which goes a long way towards creating secured applications. 
[[configuration-reference]] == Configuration Reference -include::{generated-dir}/config/quarkus-vault.adoc[opts=optional, leveloffset=+1] \ No newline at end of file +include::{generated-dir}/config/quarkus-vault.adoc[opts=optional, leveloffset=+1] + +== References + +* https://www.vaultproject.io/[HashiCorp Vault] +* link:security[Quarkus Security] diff --git a/docs/src/main/asciidoc/vertx.adoc b/docs/src/main/asciidoc/vertx.adoc index b34d15a241f96..bc28612aaaf6e 100644 --- a/docs/src/main/asciidoc/vertx.adoc +++ b/docs/src/main/asciidoc/vertx.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using Eclipse Vert.x @@ -22,19 +22,21 @@ You can also use various Vert.x APIs in your Quarkus application, such as deploy To access Vert.x, well, you need to enable the `vertx` extension to use this feature. If you are creating a new project, set the `extensions` parameter are follows: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=vertx-quickstart \ - -Dextensions="vertx" + -DclassName="org.acme.vertx.GreetingResource" \ + -Dpath="/hello" \ + -Dextensions="resteasy,vertx" cd vertx-quickstart ---- If you have an already created project, the `vertx` extension can be added to an existing Quarkus project with the `add-extension` command: -[source,shell] +[source,bash] ---- ./mvnw quarkus:add-extension -Dextensions="vertx" ---- @@ -73,7 +75,7 @@ to use the correct one, that includes none at all on unsupported platforms: You will also have to explicitly configure Vert.x to use the native transport. In `application.properties` add: -[source] +[source,properties] ---- quarkus.vertx.prefer-native-transport=true ---- @@ -133,9 +135,9 @@ Once the extension has been added, you can access the _managed_ Vert.x instance ---- If you are familiar with Vert.x, you know that Vert.x provides different API models. -For instance _bare_ Vert.x uses callbacks, the Mutiny variants uses `Uni` and `Multi`, the RX Java 2 version uses `Single`, `Maybe`, `Completable`, `Observable` and `Flowable`... +The _bare_ Vert.x uses callbacks, while the Mutiny variant uses `Uni` and `Multi`. -Quarkus provides 4 Vert.x APIs: +Quarkus provides 2 Vert.x APIs: [options="header"] |=== @@ -146,21 +148,15 @@ Quarkus provides 4 Vert.x APIs: | https://smallrye.io/smallrye-mutiny/[Mutiny] | `@Inject io.vertx.mutiny.core.Vertx vertx` | The Mutiny API for Vert.x. -| RX Java 2 | `@Inject io.vertx.reactivex.core.Vertx vertx` | RX Java 2 Vert.x, the API uses RX Java 2 types (deprecated). - -| _Axle_ | `@Inject io.vertx.axle.core.Vertx vertx` | Axle Vert.x, the API uses `CompletionStage` and `Reactive Streams` (deprecated). - |=== -TIP: You may inject any of the 4 flavors of `Vertx` as well as the `EventBus` in your Quarkus application beans: `bare`, `Mutiny`, `Axle`, `RxJava2`. +TIP: You may inject any of the 2 flavors of `Vertx` as well as the `EventBus` in your Quarkus application beans: `bare` and `Mutiny`. They are just shims and rely on a single _managed_ Vert.x instance. You will pick one or the other depending on your use cases. 
- `bare`: for advanced usage or if you have existing Vert.x code you want to reuse in your Quarkus application - `mutiny`: Mutiny is an event-driven reactive programming API. It uses 2 types: `Uni` and `Multi`. This is the recommended API. -- `Axle`: works well with Quarkus and MicroProfile APIs (`CompletionStage` for single results and `Publisher` for streams) - deprecated, it is recommended to switch to Mutiny -- `Rx Java 2`: when you need support for a wide range of data transformation operators on your streams - deprecated, it is recommended to switch to Mutiny The following snippets illustrate the difference between these 4 APIs: @@ -183,25 +179,6 @@ vertx.fileSystem().readFile("lorem-ipsum.txt") content -> System.out.println("Content: " + content), err -> System.out.println("Cannot read the file: " + err.getMessage()) ); - -// Rx Java 2 Vert.x -vertx.fileSystem().rxReadFile("lorem-ipsum.txt") - .map(buffer -> buffer.toString("UTF-8")) - .subscribe( - content -> System.out.println("Content: " + content), - err -> System.out.println("Cannot read the file: " + err.getMessage()) - ); - -// Axle API: -vertx.fileSystem().readFile("lorem-ipsum.txt") - .thenApply(buffer -> buffer.toString("UTF-8")) - .whenComplete((content, err) -> { - if (err != null) { - System.out.println("Cannot read the file: " + err.getMessage()); - } else { - System.out.println("Content: " + content); - } - }); ---- [TIP] @@ -235,7 +212,7 @@ public class GreetingResource { ---- -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -465,22 +442,8 @@ Depending on the API model you want to use you need to add the right dependency io.smallrye.reactive smallrye-mutiny-vertx-web-client - - - - io.smallrye.reactive - smallrye-axle-web-client - - - - - io.vertx - vertx-rx-java2 - ---- -NOTE: The `vertx-rx-java2` provides the RX Java 2 API for the whole Vert.x stack, not only the web client. - In this guide, we are going to use the Mutiny API, so: [source, xml, subs=attributes+] @@ -556,7 +519,7 @@ The `WebClient` is obviously asynchronous (and non-blocking), to the endpoint re Run the application with: -[source, shell] +[source,bash] ---- ./mvnw compile quarkus:dev ---- @@ -567,14 +530,14 @@ The application can also run as a native executable. But, first, we need to instruct Quarkus to enable _ssl_. Open the `src/main/resources/application.properties` and add: -[source] +[source,properties] ---- quarkus.ssl.native=true ---- Then, create the native executable with: -[source, shell] +[source,bash] ---- ./mvnw package -Pnative ---- @@ -701,8 +664,9 @@ In environments with read only file systems you may receive errors of the form: java.lang.IllegalStateException: Failed to create cache dir ---- -Assuming `/tmp/` is writeable this can be fixed by setting the `vertx.cacheDirBase` property to point to a directory in `/tmp/` for instance in OpenShift by creating an environment variable `JAVA_OPTS` with the value `-Dvertx.cacheDirBase=/tmp/vertx`. +Assuming `/tmp/` is writable this can be fixed by setting the `vertx.cacheDirBase` property to point to a directory in `/tmp/` for instance in OpenShift by creating an environment variable `JAVA_OPTS` with the value `-Dvertx.cacheDirBase=/tmp/vertx`. +[[reverse-proxy]] == Running behind a reverse proxy Quarkus could be accessed through proxies that additionally generate headers (e.g. `X-Forwarded-Host`) to keep @@ -714,28 +678,27 @@ IMPORTANT: Activating this feature makes the server exposed to several security Consider activate it only when running behind a reverse proxy. 
To setup this feature, please include the following lines in `src/main/resources/application.properties`: -[source] +[source,properties] ---- quarkus.http.proxy-address-forwarding=true ---- To consider only de-facto standard header (`Forwarded` header), please include the following lines in `src/main/resources/application.properties`: -[source] +[source,properties] ---- -quarkus.http.allow-forwarded=true +quarkus.http.proxy.allow-forwarded=true ---- To consider only non-standard headers, please include the following lines instead in `src/main/resources/application.properties`: -[source] +[source,properties] ---- -quarkus.http.proxy-address-forwarding=true +quarkus.http.proxy.proxy-address-forwarding=true quarkus.http.proxy.enable-forwarded-host=true quarkus.http.proxy.enable-forwarded-prefix=true ---- -Both configurations related with standard and non-standard headers can be combine. -In this case, the `Forwarded` header will have precedence in presence of both set of headers. +Both configurations related to standard and non-standard headers can be combined, although the standard headers configuration will have precedence. Supported forwarding address headers are: @@ -746,6 +709,18 @@ Supported forwarding address headers are: * `X-Forwarded-Ssl` * `X-Forwarded-Prefix` +[[same-site-cookie]] +== SameSite cookies + +One can easily add a https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie/SameSite[SameSite] cookie property to any of the cookies set by a Quarkus endpoint by listing a cookie name and a `SameSite` attribute, for example: + +[source] +---- +quarkus.http.same-site-cookie.jwt.value=Lax +quarkus.http.same-site-cookie.session.value=Strict +---- + +Given this configuration, the `jwt` cookie will have a `SameSite=Lax` attribute and the `session` cookie will have a `SameSite=Strict` attribute. == Going further @@ -755,7 +730,7 @@ There are many other facets of Quarkus using Vert.x underneath: Quarkus integrates it so different beans can interact with asynchronous messages. This part is covered in the link:reactive-event-bus[event bus documentation]. -* Data streaming and Apache Kafka are a important parts of modern systems. +* Data streaming and Apache Kafka are an important part of modern systems. Quarkus integrates data streaming using Reactive Messaging. More details on link:kafka[Interacting with Kafka]. diff --git a/docs/src/main/asciidoc/websockets.adoc b/docs/src/main/asciidoc/websockets.adoc index 88990ae04f8d1..2251c6bfb008f 100644 --- a/docs/src/main/asciidoc/websockets.adoc +++ b/docs/src/main/asciidoc/websockets.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Using WebSockets @@ -40,23 +40,24 @@ The solution is located in the `websockets-quickstart` {quickstarts-tree-url}/we First, we need a new project. Create a new project with the following command: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=websockets-quickstart \ - -Dextensions="undertow-websockets" + -Dextensions="websockets" \ + -DnoExamples cd websockets-quickstart ---- -This command generates the Maven project (without any classes) and import the `undertow-websockets` extension. 
+This command generates the Maven project (without any classes) and import the `websockets` extension. -If you already have your Quarkus project configured, you can add the `undertow-websockets` extension +If you already have your Quarkus project configured, you can add the `websockets` extension to your project by running the following command in your project base directory: [source,bash] ---- -./mvnw quarkus:add-extension -Dextensions="undertow-websockets" +./mvnw quarkus:add-extension -Dextensions="websockets" ---- This will add the following to your `pom.xml`: @@ -65,7 +66,7 @@ This will add the following to your `pom.xml`: ---- io.quarkus - quarkus-undertow-websockets + quarkus-websockets ---- @@ -145,9 +146,10 @@ Create the `src/main/resources/META-INF/resources` directory and copy this {quic Now, let's see our application in action. Run it with: -``` +[source,bash] +---- ./mvnw compile quarkus:dev -``` +---- Then open your 2 browser windows to http://localhost:8080/: @@ -157,7 +159,83 @@ Then open your 2 browser windows to http://localhost:8080/: image:websocket-guide-screenshot.png[alt=Application] -As usual, the application can be packaged using `./mvnw clean package` and executed using the `-runner.jar` file. +As usual, the application can be packaged using `./mvnw clean package` and executed using the `target/quarkus-app/quarkus-run.jar` file. You can also build the native executable using `./mvnw package -Pnative`. -You can also test your web socket applications using the approach detailed {quickstarts-blob-url}/websockets-quickstart/src/test/java/org/acme/websockets/ChatTestCase.java[here]. +You can also test your web socket applications using the approach detailed {quickstarts-blob-url}/websockets-quickstart/src/test/java/org/acme/websockets/ChatTest.java[here]. + +== WebSocket Clients + +Quarkus also contains a WebSocket client. You can call `ContainerProvider.getWebSocketContainer().connectToServer` to create a websocket connection. + +When you connect to the server you can either pass in the Class of the annotated client endpoint you want to use, or an instance of `javax.websocket.Endpoint`. If you +are using the annotated endpoint then you can use the exact same annotations as you can on the server, except it must be annotated with `@ClientEndpoint` instead of +`@ServerEndpoint`. + +The example below shows the client being used to test the chat endpoint above. 
+ +[source,java] +---- +package org.acme.websockets; + +import java.net.URI; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; + +import javax.websocket.ClientEndpoint; +import javax.websocket.ContainerProvider; +import javax.websocket.OnMessage; +import javax.websocket.OnOpen; +import javax.websocket.Session; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.common.http.TestHTTPResource; +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +public class ChatTest { + + private static final LinkedBlockingDeque MESSAGES = new LinkedBlockingDeque<>(); + + @TestHTTPResource("/chat/stu") + URI uri; + + @Test + public void testWebsocketChat() throws Exception { + try (Session session = ContainerProvider.getWebSocketContainer().connectToServer(Client.class, uri)) { + Assertions.assertEquals("CONNECT", MESSAGES.poll(10, TimeUnit.SECONDS)); + Assertions.assertEquals("User stu joined", MESSAGES.poll(10, TimeUnit.SECONDS)); + session.getAsyncRemote().sendText("hello world"); + Assertions.assertEquals(">> stu: hello world", MESSAGES.poll(10, TimeUnit.SECONDS)); + } + } + + @ClientEndpoint + public static class Client { + + @OnOpen + public void open(Session session) { + MESSAGES.add("CONNECT"); + // Send a message to indicate that we are ready, + // as the message handler may not be registered immediately after this callback. + session.getAsyncRemote().sendText("_ready_"); + } + + @OnMessage + void message(String msg) { + MESSAGES.add(msg); + } + + } + +} +---- + + +== More WebSocket Information + +The Quarkus WebSocket implementation is an implementation of link:https://eclipse-ee4j.github.io/websocket-api/[Jakarta Websockets]. + + diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc old mode 100755 new mode 100644 index 679482e3bc13c..2d79e65bd0008 --- a/docs/src/main/asciidoc/writing-extensions.adoc +++ b/docs/src/main/asciidoc/writing-extensions.adoc @@ -1,7 +1,7 @@ //// This guide is maintained in the main Quarkus repository and pull requests should be submitted there: -https://github.com/quarkusio/quarkus/tree/master/docs/src/main/asciidoc +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Quarkus - Writing Your Own Extension @@ -17,6 +17,8 @@ Quarkus extensions add a new developer focused behavior to the core offering, an This means that metadata is only processed once at build time, which both saves on startup time, and also on memory usage as the classes etc that are used for processing are not loaded (or even present) in the runtime JVM. +NOTE: This is an in-depth documentation, see the link:building-my-first-extension[building my first extension] if you need an introduction. + == Extension philosophy This section is a work in progress and gathers the philosophy under which extensions should be designed and written. @@ -91,7 +93,7 @@ An extension can produce an < +@Singleton <1> public class Echo { public String echo(String val) { @@ -125,7 +127,7 @@ And vice versa - the extension bean can inject application beans and beans provi .Extension Bean Injection Example [source%nowrap,java] ---- -@ApplicationScoped +@Singleton public class Echo { @Inject @@ -321,7 +323,7 @@ These operations should be done at build time and the metamodel be passed to the Get a CDI portable extension running:: The CDI portable extension model is very flexible. Too flexible to benefit from the build time boot promoted by Quarkus. 
-Most extension we have seen do not make use of these extreme flexibilities capabilities. +Most extension we have seen do not make use of these extreme flexibility capabilities. The way to port a CDI extension to Quarkus is to rewrite it as a Quarkus extension which will define the various beans at build time (deployment time in extension parlance). == Technical aspect @@ -380,7 +382,7 @@ TIP: You may want to use the `create-extension` mojo of `io.quarkus:quarkus-mave Your runtime artifact should depend on `io.quarkus:quarkus-core`, and possibly the runtime artifacts of other Quarkus modules if you want to use functionality provided by them. You will also need to include the `io.quarkus:quarkus-bootstrap-maven-plugin` to generate the Quarkus extension descriptor included into the runtime artifact, if you are using the Quarkus parent pom it will automatically inherit the correct configuration. -Futhermore, you'll need to configure the `maven-compiler-plugin` to detect the `quarkus-extension-processor` annotation processor. +Furthermore, you'll need to configure the `maven-compiler-plugin` to detect the `quarkus-extension-processor` annotation processor. NOTE: By convention the deployment time artifact has the `-deployment` suffix, and the runtime artifact has no suffix (and is what the end user adds to their project). @@ -467,25 +469,32 @@ You will also need to configure the `maven-compiler-plugin` to detect the `quark ---- -==== Create new extension modules using Maven +==== Create new Quarkus Core extension modules using Maven -The `create-extension` mojo of `io.quarkus:quarkus-maven-plugin` can be used to generate stubs of Maven modules needed for implementing a new Quarkus extension. +Quarkus provides `create-extension` Maven Mojo to initialize your extension project. -WARNING: This mojo can be currently used only for adding extensions to an established source tree hosting multiple extensions in one subdirectory, such as https://github.com/quarkusio/quarkus[Quarkus] or https://github.com/apache/camel-quarkus[Camel Quarkus]. Creating extension projects from scratch is not supported yet. +It will try to auto-detect its options: + +* from `quarkus` (Quarkus Core) or `quarkus/extensions` directory, it will use the 'Quarkus Core' extension layout and defaults. +* with `-DgroupId=io.quarkiverse.[extensionId]`, it will use the 'Quarkiverse' extension layout and defaults. +* in other cases it will use the 'Standalone' extension layout and defaults. +* we may introduce other layout types in the future. + +TIP: You may not specify any parameter to use the interactive mode: `mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create-extension -N` As and example, let's add a new extension called `my-ext` to the Quarkus source tree: -[source, shell, subs=attributes+] +[source,bash, subs=attributes+] ---- git clone https://github.com/quarkusio/quarkus.git cd quarkus -cd extensions mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create-extension -N \ - -Dquarkus.artifactIdBase=my-ext \ - -Dquarkus.artifactIdPrefix=quarkus- \ - -Dquarkus.nameBase="My Extension" + -DextensionId=my-ext \ + -Dname="My Extension" ---- +NOTE: by default, the `groupId`, `version`, `quarkusVersion`, `namespaceId`, and `namespaceName` will be consistent with other Quarkus core extensions. 
+ The above sequence of commands does the following: * Creates four new Maven modules: @@ -495,14 +504,11 @@ The above sequence of commands does the following: ** `quarkus-my-ext-integration-test` in the `integration-tests/my-ext/deployment` directory; an empty JAX-RS Resource class and two test classes (for JVM mode and native mode) are generated in this module. * Links these three modules where necessary: ** `quarkus-my-ext-parent` is added to the `` of `quarkus-extensions-parent` -** `quarkus-my-ext` is added to the `` of the runtime BOM (Bill of Materials) `bom/runtime/pom.xml` -** `quarkus-my-ext-deployment` is added to the `` of the deployment BOM (Bill of Materials) `bom/deployment/pom.xml` +** `quarkus-my-ext` is added to the `` of the Quarkus BOM (Bill of Materials) `bom/application/pom.xml` +** `quarkus-my-ext-deployment` is added to the `` of the Quarkus BOM (Bill of Materials) `bom/application/pom.xml` ** `quarkus-my-ext-integration-test` is added to the `` of `quarkus-integration-tests-parent` -A Maven build performed immediately after generating the modules should fail due to a `fail()` assertion in one of the test classes. - -There is one step (specific to the Quarkus source tree) that you should do manually when creating a new extension: -create a `quarkus-extension.yaml` file that describe your extension inside the runtime module `src/main/resources/META-INF` folder. +NOTE: You also have to fill the `quarkus-extension.yaml` file that describe your extension inside the runtime module `src/main/resources/META-INF` folder. This is the `quarkus-extension.yaml` of the `quarkus-agroal` extension, you can use it as an example: @@ -521,30 +527,9 @@ metadata: status: "stable" ---- -Note that the parameters of the mojo that will be constant for all the extensions added to this source tree are configured in `extensions/pom.xml` so that they do not need to be passed on the command line each time a new extension is added: - -[source,xml] ----- - - io.quarkus - quarkus-maven-plugin - ${quarkus.version} - false - - - Quarkus - - ../bom/runtime/pom.xml - ../bom/deployment/pom.xml - ../integration-tests/pom.xml - - ----- - -TIP: The `nameBase` parameter of the mojo is optional. -If you do not specify it on the command line, the plugin will derive it from `artifactIdBase` by replacing dashes with spaces and uppercasing each token. -So you may consider omitting explicit `nameBase` in some cases. +TIP: The `name` parameter of the mojo is optional. +If you do not specify it on the command line, the plugin will derive it from `extensionId` by replacing dashes with spaces and uppercasing each token. +So you may consider omitting explicit `name` in some cases. // The following link should point to the mojo page once https://github.com/quarkusio/quarkusio.github.io/issues/265 is fixed Please refer to https://github.com/quarkusio/quarkus/blob/{quarkus-version}/devtools/maven/src/main/java/io/quarkus/maven/CreateExtensionMojo.java[CreateExtensionMojo JavaDoc] for all the available options of the mojo. @@ -913,18 +898,6 @@ ExecutorBuildItem createExecutor( // <2> <1> Note the `optional` attribute. <2> This example is using recorder proxies; see the section on <> for more information. -==== Capabilities - -The `@BuildStep` annotation has a `providesCapabilities` property that can be used to provide capability information -to other extensions about what is present in the current application. Capabilities are simply strings that are used to -describe an extension. 
Capabilities should generally be named after an extensions root package, for example the transactions -extension will provide `io.quarkus.transactions`. - -To check if a capability is present you can inject the `io.quarkus.deployment.Capabilities` object and call -`isCapabilityPresent`. - -Capabilities should be used when checking for the presence of an extension rather than class path based checks. - ==== Application Archives The `@BuildStep` annotation can also register marker files that determine which archives on the class path are considered @@ -1093,8 +1066,9 @@ lowercased, and re-joined using hyphens (`-`). A configuration root's class name can contain an extra suffix segment for the case where there are configuration roots for multiple <>. Classes which correspond to the `BUILD_TIME` and `BUILD_AND_RUN_TIME_FIXED` -may end with `BuildTimeConfig` or `BuildTimeConfiguration`, and classes which correspond to the `RUN_TIME` phase -may end with `RuntimeConfig`, `RunTimeConfig`, `RuntimeConfiguration` or `RunTimeConfiguration`. +may end with `BuildTimeConfig` or `BuildTimeConfiguration`, classes which correspond to the `RUN_TIME` phase +may end with `RuntimeConfig`, `RunTimeConfig`, `RuntimeConfiguration` or `RunTimeConfiguration` while classes which correspond +to the `BOOTSTRAP` configuration may end with `BootstrapConfig` or `BootstrapConfiguration`. Note: The current implementation is still using injection site to determine the root set, so to avoid migration problems, it is recommended that the injection site (field or parameter) have the same name as the configuration root class until @@ -1128,6 +1102,13 @@ A configuration root dictates when its contained keys are read from configuratio | ✗ | Appropriate for things which affect build and must be visible for run time code. Not read from config at run time. +| BOOTSTRAP +| ✗ +| ✓ +| ✗ +| ✓ +| Used when runtime configuration needs to be obtained from an external system (like `Consul`), but details of that system need to be configurable (for example Consul's URL). The high level way this works is by using the standard Quarkus config sources (such as properties files, system properties, etc.) and producing `ConfigSourceProvider` objects which are subsequently taken into account by Quarkus when creating the final runtime `Config` object. + | RUN_TIME | ✗ | ✓ @@ -1139,6 +1120,8 @@ A configuration root dictates when its contained keys are read from configuratio For all cases other than the `BUILD_TIME` case, the configuration root class and all of the configuration groups and types contained therein must be located in, or reachable from, the extension's run time artifact. Configuration roots of phase `BUILD_TIME` may be located in or reachable from either of the extension's run time or deployment artifacts. +IMPORTANT: _Bootstrap_ configuration steps are executed during runtime-init *before* any of other runtime steps. This means that code executed as part of this step cannot access anything that gets initialized in runtime init steps (runtime synthetic CDI beans being one such example). + ==== Configuration Example [source%nowrap,java] @@ -1359,6 +1342,9 @@ static class IsDevMode implements BooleanSupplier { } ---- +If you need to make your build step conditional on the presence or absence of another extension, you can +use <> for that. + [id='bytecode-recording'] === Bytecode Recording @@ -1439,7 +1425,7 @@ convenience to avoid the need to explicitly load classes in the recorders. 
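To make the recording flow concrete, here is a minimal, purely illustrative sketch (the class, method and parameter names are hypothetical and not part of any existing extension). The recorder lives in the runtime module:

[source%nowrap,java]
----
import io.quarkus.runtime.annotations.Recorder;

@Recorder
public class MyExtensionRecorder {

    // The method body runs at application startup; at build time only the
    // invocation and its parameters are recorded as bytecode.
    public void initialize(String serviceName) {
        System.out.println("Initializing " + serviceName);
    }
}
----

and the deployment module drives it from a build step:

[source%nowrap,java]
----
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;

public class MyExtensionProcessor {

    @BuildStep
    @Record(ExecutionTime.RUNTIME_INIT)
    void initMyExtension(MyExtensionRecorder recorder) {
        // Recorded during augmentation, replayed when the application starts
        recorder.initialize("my-service");
    }
}
----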
==== Printing step execution time At times, it can be useful to know how the exact time each startup task (which is the result of each bytecode recording) takes when the application is run. -The simplest way to determine this information is to set the `quarkus.debug.print-startup-times` property to `true` when running the application. +The simplest way to determine this information is to launch the Quarkus application with the `-Dquarkus.debug.print-startup-times=true` system property. The output will look something like: [source%nowrap] @@ -1467,6 +1453,58 @@ As a CDI based runtime, Quarkus extensions often make CDI beans available as par However, Quarkus DI solution does not support CDI Portable Extensions. Instead, Quarkus extensions can make use of various link:cdi-reference[Build Time Extension Points]. +=== Quarkus DEV UI + +You can make your extension support the link:dev-ui[Quarkus DEV UI] for a greater developer experience. + +=== Extension-defined endpoints + +Your extension can add additional, non-application endpoints to be served alongside endpoints +for Health, Metrics, OpenAPI, Swagger UI, etc. + +Use a `NonApplicationRootPathBuildItem` to define an endpoint: + +[source%nowrap,java] +---- +@BuildStep +RouteBuildItem myExtensionRoute(NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { + return nonApplicationRootPathBuildItem.routeBuilder() + .route("custom-endpoint") + .handler(new MyCustomHandler()) + .displayOnNotFoundPage() + .build(); +} +---- + +Note that the path above does not start with a '/', indicating it is a relative path. The above +endpoint will be served relative to the configured non-application endpoint root. The non-application +endpoint root is `/q` by default, which means the resulting endpoint will be found at `/q/custom-endpoint`. + +Absolute paths are handled differently. If the above called `route("/custom-endpoint")`, the resulting +endpoint will be found at `/custom-endpoint`. + +If an extension needs nested non-application endpoints: + +[source%nowrap,java] +---- +@BuildStep +RouteBuildItem myNestedExtensionRoute(NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { + return nonApplicationRootPathBuildItem.routeBuilder() + .nestedRoute("custom-endpoint", "deep") + .handler(new MyCustomHandler()) + .displayOnNotFoundPage() + .build(); +} +---- + +Given a default non-application endpoint root of `/q`, this will create an endpoint at `/q/custom-endpoint/deep`. + +Absolute paths also have an impact on nested endpoints. If the above called `nestedRoute("custom-endpoint", "/deep")`, +the resulting endpoint will be found at `/deep`. + +Refer to the link:all-config#quarkus-vertx-http_quarkus.http.non-application-root-path[Quarkus Vertx HTTP configuration reference] +for details on how the non-application root path is configured. + === Extension Health Check Health checks are provided via the `quarkus-smallrye-health` extension. It provides both liveness and readiness checks capabilities. @@ -1496,266 +1534,286 @@ HealthBuildItem addHealthCheck(AgroalBuildTimeConfig agroalBuildTimeConfig) { === Extension Metrics -An extension can decide to provide metrics through the `quarkus-smallrye-metrics` extension. -A typical use case for this would be that an extension scans the application code for relevant components (like entities, messaging endpoints, etc.) -and creates a set of metrics for each of these components. 
A unified mechanism for metric registration is provided via the `MetricBuildItem` class -provided by the `quarkus-smallrye-metrics-spi` module. +The `quarkus-micrometer` extension and the `quarkus-smallrye-metrics` extension provide support for collecting metrics. +As a compatibility note, the `quarkus-micrometer` extension adapts the MP Metrics API to Micrometer library primitives, so the `quarkus-micrometer` extension can be enabled without breaking code that relies on the MP Metrics API. +Note that the metrics emitted by Micrometer are different, see the `quarkus-micrometer` extension documentation for more information. -There are several distinct situations that can occur and each requires slightly different handling: +NOTE: The compatibility layer for MP Metrics APIs will move to a different extension in the future. -1. The underlying library used by your extension is using the MicroProfile Metrics API directly. -2. The underlying library uses its own way for collecting metrics and makes them available at runtime using its own API. -3. The underlying library does not provide metrics (or there is no library at all) and you need to insert some code in the extension's codebase that will collect the metrics. +There are two broad patterns that extensions can use to interact with an optional metrics extension to add their own metrics: -What is common for all cases is that the extension should have a build-time config property that enables metrics exposure for the extension, -it should be named `quarkus..metrics.enabled` and be `false` by default. +* Consumer pattern: An extension declares a `MetricsFactoryConsumerBuildItem` and uses that to provide a bytecode recorder to the metrics extension. When the metrics extension has initialized, it will iterate over registered consumers to initialize them with a `MetricsFactory`. This factory can be used to declare API-agnostic metrics, which can be a good fit for extensions that provide an instrumentable object for gathering statistics (e.g. Hibernate's `Statistics` class). -==== Case 1: The library uses MP Metrics -If the library exposes metrics by itself, we don't have to do much. However, there are a few points to consider: +* Binder pattern: An extension can opt to use completely different gathering implementations depending on the metrics system. An `Optional metricsCapability` build step parameter can be used to declare or otherwise initialize API-specific metrics based on the active metrics extension (e.g. "smallrye-metrics" or "micrometer"). This pattern can be combined with the consumer pattern by using `MetricsFactory::metricsSystemSupported()` to test the active metrics extension within the recorder. -- It should be possible to disable all metrics for an extension, which can be a bit problematic if the library registers them directly -rather than through the unified registration mechanism in the `quarkus-smallrye-metrics-spi` module. Therefore the library should contain -a way to turn all metrics off, for example using a library-specific system property that can be set during build time (by emitting a `SystemPropertyBuildItem`), -and will disable metrics if at least one of the properties `quarkus..metrics.enabled` and `quarkus.smallrye-metrics.extensions.enabled` is `false`. 
-- It is desirable to be able to omit the MP Metrics dependency at runtime, so if possible, the library should be written in a way that -it will still work when the MP Metrics dependencies (or at least the implementation, `io.smallrye:smallrye-metrics`) are unavailable. This can be achieved by -wrapping all code that does something metric-related into an `if` condition that checks whether metrics integration is enabled. If the library performs injections -of the `MetricRegistry`, which is not avoidable by introducing an `if` condition, it will unfortunately not be possible to remove the `microprofile-metrics` dependency, -but it is still possible to avoid the need for `smallrye-metrics` (which contains the implementation class of the metric registry) by introducing a custom "no-op" -implementation of the `MetricRegistry` which can, for example, return `null` from all its methods. This no-op implementation should be added to the application's classes -so that it can be injected instead of the regular `io.smallrye.metrics.MetricRegistryImpl`. An example of a no-op implementation can be found at -https://github.com/quarkusio/quarkus/blob/master/extensions/smallrye-fault-tolerance/runtime/src/main/java/io/quarkus/smallrye/faulttolerance/runtime/NoopMetricRegistry.java[NoopMetricRegistry] +Remember that support for metrics is optional. Extensions can use an `Optional metricsCapability` parameter in their build step to test for the presence of an enabled metrics extension. Consider using additional configuration to control behavior of metrics. Datasource metrics can be expensive, for example, so additional configuration flags are used enable metrics collection on individual datasources. + +When adding metrics for your extension, you may find yourself in one of the following situations: + +1. An underlying library used by the extension is using a specific Metrics API directly (either MP Metrics, Micrometer, or some other). +2. An underlying library uses its own mechanism for collecting metrics and makes them available at runtime using its own API, e.g. Hibernate's `Statistics` class, or Vert.x `MetricsOptions`. +3. An underlying library does not provide metrics (or there is no library at all) and you want to add instrumentation. + +==== Case 1: The library uses a metrics library directly + +If the library directly uses a metrics API, there are two options: + +- Use an `Optional metricsCapability` parameter to test which metrics API is supported (e.g. "smallrye-metrics" or "micrometer") in your build step, and use that to selectively declare or initialize API-specific beans or build items. + +- Create a separate build step that consumes a `MetricsFactory`, and use the `MetricsFactory::metricsSystemSupported()` method within the bytecode recorder to initialize required resources if the desired metrics API is supported (e.g. "smallrye-metrics" or "micrometer"). + +Extensions may need to provide a fallback if there is no active metrics extension or the extension doesn't support the API required by the library. ==== Case 2: The library provides its own metric API -In this case, the extension can make use of the `quarkus-smallrye-metrics-spi` module to expose the metrics from the library using MP Metrics. 
-You should do the following: +There are two examples of a library providing its own metrics API: + +- The extension defines an instrumentable object as Agroal does with `io.agroal.api.AgroalDataSourceMetrics`, or +- The extension provides its own abstraction of metrics, as Jaeger does with `io.jaegertracing.spi.MetricsFactory`. -- Import the `quarkus-smallrye-metrics-spi` library in your deployment module. -- Import the `quarkus-smallrye-metrics` extension as an **optional** dependency in your runtime module so it will not impact the size of the application if -metrics are not included. -- In your processor, produce any number of `MetricBuildItem` items as shown in the example below. -The metric name of all metrics should start with a short name of the extension that owns them. -- Pass the value of `quarkus..metrics.enabled` to the constructor of `MetricBuildItem` objects that the extension produces and if it is false, the metric will be ignored. +===== Observing instrumentable objects -An example build step for build-time registration of metrics: +Let's take the instrumentable object (`io.agroal.api.AgroalDataSourceMetrics`) case first. In this case, you can do the following: +- Define a `BuildStep` that produces a `MetricsFactoryConsumerBuildItem` that uses a `RUNTIME_INIT` or `STATIC_INIT` Recorder to define a `MetricsFactory` consumer. For example, the following creates a `MetricsFactoryConsumerBuildItem` if and only if metrics are enabled both for Agroal generally, and for a datasource specifically: ++ [source%nowrap,java] ---- @BuildStep -void registerMetrics(BuildProducer metrics) { - Metadata activeCountMetadata = Metadata.builder() - .withName("agroal.active.count") - .withDescription("Count of active connections") - .withType(MetricType.GAUGE) - .build(); - List dataSources = getAllDataSources(); // your relevant components specific to the extension - for(DataSource dataSource : dataSources) { - metrics.produce(new MetricBuildItem(activeCountMetadata, - new ActiveCountGauge(dataSource.getName()), - agroalBuildTimeConfig.metricsEnabled, - configRootName, // name of the config root pertaining to this extension - new Tag("datasource", dataSource.getName()))); +@Record(ExecutionTime.RUNTIME_INIT) +void registerMetrics(AgroalMetricsRecorder recorder, + DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, + BuildProducer datasourceMetrics, + List aggregatedDataSourceBuildTimeConfigs) { + + for (AggregatedDataSourceBuildTimeConfigBuildItem aggregatedDataSourceBuildTimeConfig : aggregatedDataSourceBuildTimeConfigs) { + // Create a MetricsFactory consumer to register metrics for a data source + // IFF metrics are enabled globally and for the data source + // (they are enabled for each data source by default if they are also enabled globally) + if (dataSourcesBuildTimeConfig.metricsEnabled && + aggregatedDataSourceBuildTimeConfig.getJdbcConfig().enableMetrics.orElse(true)) { + datasourceMetrics.produce(new MetricsFactoryConsumerBuildItem( + recorder.registerDataSourceMetrics(aggregatedDataSourceBuildTimeConfig.getName()))); + } } } ---- -In this example, there is a gauge per each data source that shows the current number of active connections. -In this example, we provide an object that implements the logic of the metric, in the case of a gauge, -it needs to implement `org.eclipse.microprofile.metrics.Gauge`. Example: - +- The associated recorder should use the provided `MetricsFactory` to register metrics. 
For Agroal, this means using the `MetricFactory` API to observe `io.agroal.api.AgroalDataSourceMetrics` methods. For example: ++ [source%nowrap,java] ---- -public class ActiveCountGauge implements Gauge { - - private String dataSourceName; - - public ActiveCountGauge() { - - } - - public ActiveCountGauge(String dataSourceName) { - this.dataSourceName = dataSourceName; - } +/* RUNTIME_INIT */ +public Consumer registerDataSourceMetrics(String dataSourceName) { + return new Consumer() { + @Override + public void accept(MetricsFactory metricsFactory) { + String tagValue = DataSourceUtil.isDefault(dataSourceName) ? "default" : dataSourceName; + AgroalDataSourceMetrics metrics = getDataSource(dataSourceName).getMetrics(); - @Override - public Long getValue() { - DataSource ds = obtainDataSource(dataSourceName); // some logic to get hold of the data source - return ds.getMetrics().getActiveCount(); - } -} + // When using MP Metrics, the builder uses the VENDOR registry by default. + metricsFactory.builder("agroal.active.count") + .description( + "Number of active connections. These connections are in use and not available to be acquired.") + .tag("datasource", tagValue) + .buildGauge(metrics::activeCount); + .... ---- -When registering gauges, it is required to provide a custom object that implements the correct metric type. -For metric types other than gauges, this is optional - you can choose whether to provide an implementation, -or whether the default implementation class from `SmallRye Metrics` should be instantiated for the metric and registered -in the metric registry. To update the metric value, it will be necessary to look them up in the metric registry -and call the methods specific to each metric type. - -Look into the `AgroalProcessor#registerMetrics` method for an example how this was done for the Agroal extension. +The `MetricsFactory` provides a fluid builder for registration of metrics, with the final step constructing gauges or counters based on a `Supplier` or `ToDoubleFunction`. Timers can either wrap `Callable`, `Runnable`, or `Supplier` implementations, or can use a `TimeRecorder` to accumulate chunks of time. The underlying metrics extension will create appropriate artifacts to observe or measure the defined functions. -==== Case 3: It is necessary to collect metrics within the extension code +===== Using a Metrics API-specific implementation -In this case, the dependency setup and build-time registration is the same as in case 2. The difference will be that instead of introducing -custom metric objects that bridge between library-specific metrics and MP Metrics, the extension's code will contain metric collection code like this: +Using metrics-API specific implementations may be preferred in some cases. Jaeger, for example, defines its own metrics interface, `io.jaegertracing.spi.MetricsFactory`, that it uses to define counters and gauges. A direct mapping from that interface to the metrics system will be the most efficient. In this case, it is important to isolate these specialized implementations and to avoid eager classloading to ensure the metrics API remains an optional, compile-time dependency. +`Optional metricsCapability` can be used in the build step to selectively control initialization of beans or the production of other build items. 
The Jaeger extension, for example, can use the following to control initialization of specialized Metrics API adapters: ++ [source%nowrap,java] ---- -public void methodThatShouldBeCounted() { - if(metricsEnabled) { - Counter counter = MetricRegistries.get(MetricRegistry.Type.VENDOR).counter("name"); - counter.inc(); +/* RUNTIME_INIT */ +@BuildStep +@Record(ExecutionTime.RUNTIME_INIT) +void setupTracer(JaegerDeploymentRecorder jdr, JaegerBuildTimeConfig buildTimeConfig, JaegerConfig jaeger, + ApplicationConfig appConfig, Optional metricsCapability) { + + // Indicates that this extension would like the SSL support to be enabled + extensionSslNativeSupport.produce(new ExtensionSslNativeSupportBuildItem(Feature.JAEGER.getName())); + + if (buildTimeConfig.enabled) { + // To avoid dependency creep, use two separate recorder methods for the two metrics systems + if (buildTimeConfig.metricsEnabled && metricsCapability.isPresent()) { + if (metricsCapability.get().metricsSupported(MetricsFactory.MICROMETER)) { + jdr.registerTracerWithMicrometerMetrics(jaeger, appConfig); + } else { + jdr.registerTracerWithMpMetrics(jaeger, appConfig); + } + } else { + jdr.registerTracerWithoutMetrics(jaeger, appConfig); + } } - // proceed with the invocation normally } ---- -In this case, be aware that the MP Metrics API and implementation might not be available on the runtime classpath! -This is why there is a `if(metricsEnabled)` check that should make sure that if metrics are not enabled, we don't touch MP Metrics classes, -because that would fail. The extension itself will need to provide such check, and it must return true only if both of these conditions hold: +A recorder consuming a `MetricsFactory` can use `MetricsFactory::metricsSystemSupported()` can be used to control initialization of metrics objects during bytecode recording in a similar way. -- The Metrics capability is present and therefore the SmallRye Metrics library is available -- Metrics are not disabled for this extension +==== Case 3: It is necessary to collect metrics within the extension code -The recommended way to implement this check would be to produce a specific `SystemPropertyBuildItem` during build, which will make it easy -to evaluate these two conditions, and then check back to this system property at runtime. Example: +To define your own metrics from scratch, you have two basic options: Use the generic `MetricFactory` builders, or follow the binder pattern, and create instrumentation specific to the enabled metrics extension. +To use the extension-agnostic `MetricFactory` API, your processor can define a `BuildStep` that produces a `MetricsFactoryConsumerBuildItem` that uses a `RUNTIME_INIT` or `STATIC_INIT` Recorder to define a `MetricsFactory` consumer. 
++ [source%nowrap,java] ---- @BuildStep -SystemPropertyBuildItem produceMetricsEnabledProperty(AgroalBuildTimeConfig extensionSpecificConfig, - Capabilities capabilities) { - return new SystemPropertyBuildItem("metrics.enabled", - String.valueOf(capabilities.isCapabilityPresent(Capabilities.METRICS) && - extensionSpecificConfig.metricsEnabled)); +@Record(ExecutionTime.RUNTIME_INIT) +MetricsFactoryConsumerBuildItem registerMetrics(MyExtensionRecorder recorder) { + return new MetricsFactoryConsumerBuildItem(recorder.registerMetrics()); } ---- ++ +- The associated recorder should use the provided `MetricsFactory` to register metrics, for example ++ +[source%nowrap,java] +---- +final LongAdder extensionCounter = new LongAdder(); +/* RUNTIME_INIT */ +public Consumer registerMetrics() { + return new Consumer() { + @Override + public void accept(MetricsFactory metricsFactory) { + metricsFactory.builder("my.extension.counter") + .buildGauge(extensionCounter::longValue); + .... +---- + +Remember that metrics extensions are optional. Keep metrics-related initialization isolated from other setup for your extension, and structure your code to avoid eager imports of metrics APIs. Gathering metrics can also be expensive. Consider using additional extension-specific configuration to control behavior of metrics if the presence/absence of metrics support isn't sufficient. === Customizing JSON handling from an extension + Extensions often need to register serializers and/or deserializers for types the extension provides. -For this, both JSON-B and Jackson extension provide a way to register serializer/deserializer from within an +For this, both Jackson and JSON-B extensions provide a way to register serializer/deserializer from within an extension deployment module. Keep in mind that not everybody will need JSON, so you need to make it optional. If an extension intends to provide JSON related customization, -it is strongly advised to provide customization for both JSON-B and Jackson. +it is strongly advised to provide customization for both Jackson and JSON-B. -==== Customizing JSON-B -First, add an *optional* dependency to `quarkus-jsonb` on your extension's runtime module. +==== Customizing Jackson + +First, add an *optional* dependency to `quarkus-jackson` on your extension's runtime module. [source%nowrap,xml] ---- io.quarkus - quarkus-jsonb + quarkus-jackson true ---- -Then create a serializer and/or a deserializer for JSON-B, an example of which can be seen in the `mongodb-panache` extension. +Then create a serializer or a deserializer (or both) for Jackson, an example of which can be seen in the `mongodb-panache` extension. [source%nowrap,java] ---- -public class ObjectIdSerializer implements JsonbSerializer { +public class ObjectIdSerializer extends StdSerializer { + public ObjectIdSerializer() { + super(ObjectId.class); + } @Override - public void serialize(ObjectId obj, JsonGenerator generator, SerializationContext ctx) { - if (obj != null) { - generator.write(obj.toString()); + public void serialize(ObjectId objectId, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) + throws IOException { + if (objectId != null) { + jsonGenerator.writeString(objectId.toString()); } } } ---- -Add a dependency to `quarkus-jsonb-spi` on your extension's deployment module. +Add a dependency to `quarkus-jackson-spi` on your extension's deployment module. 
[source%nowrap,xml] ---- io.quarkus - quarkus-jsonb-spi + quarkus-jackson-spi ---- -Add a build step to your processor to register the serializer via the `JsonbSerializerBuildItem`. +Add a build step to your processor to register a Jackson module via the `JacksonModuleBuildItem`. +You need to name your module in a unique way across all Jackson modules. [source%nowrap,java] ---- @BuildStep -JsonbSerializerBuildItem registerJsonbSerializer() { - return new JsonbSerializerBuildItem(io.quarkus.mongodb.panache.jsonb.ObjectIdSerializer.class.getName())); +JacksonModuleBuildItem registerJacksonSerDeser() { + return new JacksonModuleBuildItem.Builder("ObjectIdModule") + .add(io.quarkus.mongodb.panache.jackson.ObjectIdSerializer.class.getName(), + io.quarkus.mongodb.panache.jackson.ObjectIdDeserializer.class.getName(), + ObjectId.class.getName()) + .build(); } ---- -The JSON-B extension will then use the produced build item to register your serializer/deserializer automatically. - -If you need more customization capabilities than registering a serializer or a deserializer, -you can produce a CDI bean that implements `io.quarkus.jsonb.JsonbConfigCustomizer` via an `AdditionalBeanBuildItem`. -More info about customizing JSON-B can be found on the JSON guide link:rest-json#configuring-json-support[Configuring JSON support] +The Jackson extension will then use the produced build item to register a module within Jackson automatically. -==== Customizing Jackson +If you need more customization capabilities than registering a module, +you can produce a CDI bean that implements `io.quarkus.jackson.ObjectMapperCustomizer` via an `AdditionalBeanBuildItem`. +More info about customizing Jackson can be found on the JSON guide link:rest-json#configuring-json-support[Configuring JSON support] -First, add an *optional* dependency to `quarkus-jackson` on your extension's runtime module. +==== Customizing JSON-B +First, add an *optional* dependency to `quarkus-jsonb` on your extension's runtime module. [source%nowrap,xml] ---- io.quarkus - quarkus-jackson + quarkus-jsonb true ---- -Then create a serializer or a deserializer (or both) for Jackson, an example of which can be seen in the `mongodb-panache` extension. +Then create a serializer and/or a deserializer for JSON-B, an example of which can be seen in the `mongodb-panache` extension. [source%nowrap,java] ---- -public class ObjectIdSerializer extends StdSerializer { - public ObjectIdSerializer() { - super(ObjectId.class); - } +public class ObjectIdSerializer implements JsonbSerializer { @Override - public void serialize(ObjectId objectId, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) - throws IOException { - if (objectId != null) { - jsonGenerator.writeString(objectId.toString()); + public void serialize(ObjectId obj, JsonGenerator generator, SerializationContext ctx) { + if (obj != null) { + generator.write(obj.toString()); } } } ---- -Add a dependency to `quarkus-jackson-spi` on your extension's deployment module. +Add a dependency to `quarkus-jsonb-spi` on your extension's deployment module. [source%nowrap,xml] ---- io.quarkus - quarkus-jackson-spi + quarkus-jsonb-spi ---- -Add a build step to your processor to register a Jackson module via the `JacksonModuleBuildItem`. -You need to name your module in a unique way across all Jackson modules. +Add a build step to your processor to register the serializer via the `JsonbSerializerBuildItem`. 
[source%nowrap,java] ---- @BuildStep -JacksonModuleBuildItem registerJacksonSerDeser() { - return new JacksonModuleBuildItem.Builder("ObjectIdModule") - .add(io.quarkus.mongodb.panache.jackson.ObjectIdSerializer.class.getName(), - io.quarkus.mongodb.panache.jackson.ObjectIdDeserializer.class.getName(), - ObjectId.class.getName()) - .build(); +JsonbSerializerBuildItem registerJsonbSerializer() { + return new JsonbSerializerBuildItem(io.quarkus.mongodb.panache.jsonb.ObjectIdSerializer.class.getName())); } ---- -The Jackson extension will then use the produced build item to register a module within Jackson automatically. +The JSON-B extension will then use the produced build item to register your serializer/deserializer automatically. -If you need more customization capabilities than registering a module, -you can produce a CDI bean that implements `io.quarkus.jackson.ObjectMapperCustomizer` via an `AdditionalBeanBuildItem`. -More info about customizing Jackson can be found on the JSON guide link:rest-json#configuring-json-support[Configuring JSON support] +If you need more customization capabilities than registering a serializer or a deserializer, +you can produce a CDI bean that implements `io.quarkus.jsonb.JsonbConfigCustomizer` via an `AdditionalBeanBuildItem`. +More info about customizing JSON-B can be found on the JSON guide link:rest-json#configuring-json-support[Configuring JSON support] === Testing Extensions @@ -1812,7 +1870,7 @@ public class FailingUnitTest { @Test public void testHealthServlet() { - RestAssured.when().get("/health").then().statusCode(503); // <4> + RestAssured.when().get("/q/health").then().statusCode(503); // <4> } @Test @@ -2007,12 +2065,9 @@ A convenience feature that allows you to control most of the above features from `io.quarkus.deployment.builditem.NativeImageEnableAllCharsetsBuildItem`:: Indicates that all charsets should be enabled in native image. -`io.quarkus.deployment.builditem.NativeImageEnableAllTimeZonesBuildItem`:: -Indicates that all timezones should be enabled in native image. - `io.quarkus.deployment.builditem.ExtensionSslNativeSupportBuildItem`:: A convenient way to tell Quarkus that the extension requires SSL and it should be enabled during native image build. -When using this feature, remember to add your extension to the list of extensions that offer SSL support automatically on the https://github.com/quarkusio/quarkus/blob/master/docs/src/main/asciidoc/native-and-ssl.adoc[native and ssl guide]. +When using this feature, remember to add your extension to the list of extensions that offer SSL support automatically on the https://github.com/quarkusio/quarkus/blob/main/docs/src/main/asciidoc/native-and-ssl.adoc[native and ssl guide]. === IDE support tips @@ -2027,11 +2082,18 @@ The only particular aspect of writing Quarkus extensions in Eclipse is that APT === Troubleshooting / Debugging Tips -==== Dump the Generated Classes to the File System +// This id was previously used for the "Dump the Generated Classes to the File System" section +[[dump-the-generated-classes-to-the-file-system]] +==== Inspecting the Generated/Transformed Classes + +Quarkus generates a lot of classes during the build phase and in many cases also transforms existing classes. +It is often extremely useful to see the generated bytecode and transformed classes during the development of an extension. -During the augmentation phase Quarkus extensions generate new and modify existing classes for various purposes. 
-Sometimes you need to inspect the generated bytecode to debug or understand an issue. -There are three system properties that allow you to dump the classes to the filesystem: +If you set the `quarkus.package.fernflower.enabled` property to `true` then Quarkus will download and invoke the https://github.com/JetBrains/intellij-community/tree/master/plugins/java-decompiler/engine[Fernflower decompiler] and dump the result in the `decompiled` directory of the build tool output (`target/decompiled` for Maven for example). + +NOTE: This property only works during a normal production build (i.e. not for dev mode/tests) and when `fast-jar` packaging type is used (the default behavior). + +There are also three system properties that allow you to dump the generated/transformed classes to the filesystem and inspect them later, for example via a decompiler in your IDE. - `quarkus.debug.generated-classes-dir` - to dump the generated classes, such as bean metadata - `quarkus.debug.transformed-classes-dir` - to dump the transformed classes, e.g. Panache entities @@ -2041,7 +2103,7 @@ These properties are especially useful in the development mode or when running t For example, you can specify the `quarkus.debug.generated-classes-dir` system property to have these classes written out to disk for inspection in the development mode: -[source,shell] +[source,bash] ---- ./mvnw quarkus:dev -Dquarkus.debug.generated-classes-dir=dump-classes ---- @@ -2050,28 +2112,27 @@ NOTE: The property value could be either an absolute path, such as `/home/foo/du You should see a line in the log for each class written to the directory: -[source,shell] +[source,text] ---- INFO [io.qua.run.boo.StartupActionImpl] (main) Wrote /path/to/my/app/target/dump-classes/io/quarkus/arc/impl/ActivateRequestContextInterceptor_Bean.class ---- The property is also honored when running tests: -[source,shell] +[source,bash] ---- ./mvnw clean test -Dquarkus.debug.generated-classes-dir=target/dump-generated-classes ---- Analogously, you can use the `quarkus.debug.transformed-classes-dir` and `quarkus.debug.transformed-classes-dir` properties to dump the relevant output. - ==== Multi-module Maven Projects and the Development Mode It's not uncommon to develop an extension in a multi-module Maven project that also contains an "example" module. However, if you want to run the example in the development mode then the `-DnoDeps` system property must be used in order to exclude the local project dependencies. Otherwise, Quarkus attempts to monitor the extension classes and this may result in weird class loading issues. -[source,shell] +[source,bash] ---- ./mvnw compile quarkus:dev -DnoDeps ---- @@ -2092,7 +2153,7 @@ A _feature_ represents a functionality provided by an extension. The name of the feature gets displayed in the log during application bootstrap. .Example Startup Lines -[source, bash] +[source,text] ---- 2019-03-22 14:02:37,884 INFO [io.quarkus] (main) Quarkus 999-SNAPSHOT started in 0.061s. 2019-03-22 14:02:37,884 INFO [io.quarkus] (main) Installed features: [cdi, test-extension] <1> @@ -2116,16 +2177,16 @@ If multiple extensions register a feature of the same name the build fails. 
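For reference, registering the feature is usually a one-line build step; a minimal sketch using the `test-extension` name from the startup log above:

[source,java]
----
@BuildStep
FeatureBuildItem feature() {
    // The string passed here is what appears in the "Installed features" startup line
    return new FeatureBuildItem("test-extension");
}
----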
The feature name should also map to a label in the extension's `devtools/common/src/main/filtered/extensions.json` entry so that the feature name displayed by the startup line matches a label that one can used to select the extension when creating a project -using the Quarkus maven plugin as shown in this example taken from the link:rest-json[Writing JSON REST Services] guide where the `resteasy-jsonb` feature is referenced: +using the Quarkus maven plugin as shown in this example taken from the link:rest-json[Writing JSON REST Services] guide where the `resteasy-jackson` feature is referenced: -[source,shell,subs=attributes+] +[source,bash,subs=attributes+] ---- mvn io.quarkus:quarkus-maven-plugin:{quarkus-version}:create \ -DprojectGroupId=org.acme \ -DprojectArtifactId=rest-json \ -DclassName="org.acme.rest.json.FruitResource" \ -Dpath="/fruits" \ - -Dextensions="resteasy-jsonb" + -Dextensions="resteasy,resteasy-jackson" cd rest-json ---- @@ -2134,6 +2195,7 @@ cd rest-json A _capability_ represents a technical capability that can be queried by other extensions. An extension may provide multiple capabilities and multiple extensions can provide the same capability. By default, capabilities are not displayed to users. +Capabilities should be used when checking for the presence of an extension rather than class path based checks. Capabilities can be registered in a <> method that produces a `CapabilityBuildItem`: @@ -2166,7 +2228,7 @@ Capabilities provided by core extensions should be listed in the `io.quarkus.dep ==== Bean Defining Annotations The CDI layer processes CDI beans that are either explicitly registered or that it discovers based on bean defining annotations as defined in http://docs.jboss.org/cdi/spec/2.0/cdi-spec.html#bean_defining_annotations[2.5.1. Bean defining annotations]. You can expand this set of annotations to include annotations your extension processes using a `BeanDefiningAnnotationBuildItem` as shown in this `TestProcessor#registerBeanDefinningAnnotations` example: -.Register a Bean Definining Annotation +.Register a Bean Defining Annotation [source,java] ---- import javax.enterprise.context.ApplicationScoped; @@ -2270,7 +2332,7 @@ If your extension defines annotations or interfaces that mark beans needing to b .stream() .anyMatch(dotName -> dotName.equals(DotName.createSimple(IConfigConsumer.class.getName()))); <4> if (isConfigConsumer) { - Class beanClass = (Class) Class.forName(beanClassInfo.name().toString()); + Class beanClass = (Class) Class.forName(beanClassInfo.name().toString(), false, Thread.currentThread().getContextClassLoader()); testBeanProducer.produce(new TestBeanBuildItem(beanClass)); <5> log.infof("Configured bean: %s", beanClass); } @@ -2418,7 +2480,7 @@ and shutdown. There are CDI events fired that components can observe are illustr import io.quarkus.runtime.ShutdownEvent; import io.quarkus.runtime.StartupEvent; -puclic class SomeBean { +public class SomeBean { /** * Called when the runtime has started * @param event @@ -2437,7 +2499,7 @@ puclic class SomeBean { } ---- <1> Observe a `StartupEvent` to be notified the runtime has started. -<2> Observe a 'ShutdownEvent` to be notified when the runtime is going to shutdown. +<2> Observe a `ShutdownEvent` to be notified when the runtime is going to shutdown. What is the relevance of startup and shutdown events for extension authors? We have already seen the use of a `ShutdownContext` to register a callback to perform shutdown tasks in the <> section. 
These shutdown tasks would be called @@ -2611,7 +2673,7 @@ public class SomeServiceRecorder { Objects created during the build phase that are passed into the runtime need to have a default constructor in order for them to be created and configured at startup of the runtime from the build time state. If an object does not have a default constructor you will see an error similar to the following during generation of the augmented artifacts: .DSAPublicKey Serialization Error -[source,bash] +[source,text] ---- [error]: Build step io.quarkus.deployment.steps.MainClassBuildStep#build threw an exception: java.lang.RuntimeException: Unable to serialize objects of type class sun.security.provider.DSAPublicKeyImpl to bytecode as it has no default constructor at io.quarkus.builder.Execution.run(Execution.java:123) @@ -2670,7 +2732,7 @@ public class DSAPublicKeyObjectSubstitution implements ObjectSubstitution` configuration knob. -It should be added to the `native-image` configuration using an `` as shown in the examples above. +It should be added to the `native-image` configuration using the `quarkus.native.additional-build-args` configuration property as shown in the examples above. [NOTE] ==== -You can find more information about all this in https://github.com/oracle/graal/blob/master/substratevm/CLASS-INITIALIZATION.md[the GraalVM documentation]. +You can find more information about all this in https://github.com/oracle/graal/blob/master/substratevm/ClassInitialization.md[the GraalVM documentation]. ==== [NOTE] @@ -270,10 +279,17 @@ You can find more information about all this in https://github.com/oracle/graal/ When multiple classes or packages need to be specified via the `quarkus.native.additional-build-args` configuration property, the `,` symbol needs to be escaped. An example of this is the following: -[source] +[source,properties] ---- quarkus.native.additional-build-args=--initialize-at-run-time=com.example.SomeClass\\,org.acme.SomeOtherClass ---- + +and in the case of using the Maven configuration instead of `application.properties`: + +[source,xml] +---- +--initialize-at-run-time=com.example.SomeClass\,org.acme.SomeOtherClass +---- ==== === Managing Proxy Classes @@ -288,7 +304,7 @@ com.oracle.svm.core.jdk.UnsupportedFeatureError: Proxy class defined by interfac ---- Solving this issue requires adding the `-H:DynamicProxyConfigurationResources=` option and to provide a dynamic proxy configuration file. -You can find all the information about the format of this file in https://github.com/oracle/graal/blob/master/substratevm/DYNAMIC_PROXY.md#manual-configuration[the GraalVM documentation]. +You can find all the information about the format of this file in https://github.com/oracle/graal/blob/master/substratevm/DynamicProxy.md#manual-configuration[the GraalVM documentation]. [[native-in-extension]] == Supporting native in a Quarkus extension @@ -321,13 +337,9 @@ public class SaxParserProcessor { [NOTE] ==== -More information about reflection in GraalVM can be found https://github.com/oracle/graal/blob/master/substratevm/REFLECTION.md[here]. +More information about reflection in GraalVM can be found https://github.com/oracle/graal/blob/master/substratevm/Reflection.md[here]. ==== -=== Alternative with @RegisterForReflection - -As for applications, you can also use the `@RegisterForReflection` annotation if the class is in your extension and not in a third-party jar. 
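As a quick illustration of that annotation (the class name below is hypothetical):

[source,java]
----
import io.quarkus.runtime.annotations.RegisterForReflection;

@RegisterForReflection
public class MyDto {
    // Constructors, methods and fields of this class remain reflectively
    // accessible in the native executable.
}
----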
- === Including resources In the context of an extension, Quarkus eliminates the need for a JSON configuration file by allowing extension authors to specify a `NativeImageResourceBuildItem`: @@ -346,7 +358,7 @@ public class ResourcesProcessor { [NOTE] ==== -For more information about GraalVM resource handling in native executables please refer to https://github.com/oracle/graal/blob/master/substratevm/RESOURCES.md[the GraalVM documentation]. +For more information about GraalVM resource handling in native executables please refer to https://github.com/oracle/graal/blob/master/substratevm/Resources.md[the GraalVM documentation]. ==== @@ -370,7 +382,7 @@ Using such a construct means that a `--initialize-at-run-time` option will autom [NOTE] ==== -For more information about `--initialize-at-run-time`, please read https://github.com/oracle/graal/blob/master/substratevm/CLASS-INITIALIZATION.md[the GraalVM documentation]. +For more information about `--initialize-at-run-time`, please read https://github.com/oracle/graal/blob/master/substratevm/ClassInitialization.md[the GraalVM documentation]. ==== === Managing Proxy Classes @@ -394,7 +406,7 @@ Using such a construct means that a `-H:DynamicProxyConfigurationResources` opti [NOTE] ==== -For more information about Proxy Classes you can read https://github.com/oracle/graal/blob/master/substratevm/DYNAMIC_PROXY.md[the GraalVM documentation]. +For more information about Proxy Classes you can read https://github.com/oracle/graal/blob/master/substratevm/DynamicProxy.md[the GraalVM documentation]. ==== === Logging with Native Image diff --git a/docs/src/main/java/io/quarkus/docs/generation/AllConfigGenerator.java b/docs/src/main/java/io/quarkus/docs/generation/AllConfigGenerator.java index f82e376e3f8e0..5fb7b4c999604 100644 --- a/docs/src/main/java/io/quarkus/docs/generation/AllConfigGenerator.java +++ b/docs/src/main/java/io/quarkus/docs/generation/AllConfigGenerator.java @@ -21,10 +21,6 @@ import org.eclipse.aether.resolution.ArtifactRequest; import org.eclipse.aether.resolution.ArtifactResult; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategy; - import io.quarkus.annotation.processor.generate_doc.ConfigDocGeneratedOutput; import io.quarkus.annotation.processor.generate_doc.ConfigDocItem; import io.quarkus.annotation.processor.generate_doc.ConfigDocItemScanner; @@ -33,9 +29,18 @@ import io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; -import io.quarkus.docs.generation.ExtensionJson.Extension; +import io.quarkus.maven.ArtifactCoords; +import io.quarkus.registry.catalog.Extension; +import io.quarkus.registry.catalog.json.JsonCatalogMapperHelper; +import io.quarkus.registry.catalog.json.JsonExtensionCatalog; public class AllConfigGenerator { + + public static Artifact toAetherArtifact(ArtifactCoords coords) { + return new DefaultArtifact(coords.getGroupId(), coords.getArtifactId(), coords.getClassifier(), coords.getType(), + coords.getVersion()); + } + public static void main(String[] args) throws BootstrapMavenException, IOException { if (args.length != 2) { // exit 1 will break Maven @@ -51,30 +56,26 @@ public static void main(String[] args) throws BootstrapMavenException, IOExcepti // exit 0 will break Maven return; } - ObjectMapper mapper = new ObjectMapper() - 
.enable(JsonParser.Feature.ALLOW_COMMENTS) - .enable(JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS) - .setPropertyNamingStrategy(PropertyNamingStrategy.KEBAB_CASE); MavenArtifactResolver resolver = MavenArtifactResolver.builder().setWorkspaceDiscovery(false).build(); - // let's read it (and ignore the fields we don't need) - ExtensionJson extensionJson = mapper.readValue(jsonFile, ExtensionJson.class); + final JsonExtensionCatalog extensionJson = JsonCatalogMapperHelper.deserialize(jsonFile.toPath(), + JsonExtensionCatalog.class); // now get all the listed extension jars via Maven - List requests = new ArrayList<>(extensionJson.extensions.size()); + List requests = new ArrayList<>(extensionJson.getExtensions().size()); Map extensionsByGav = new HashMap<>(); Map extensionsByConfigRoots = new HashMap<>(); - for (Extension extension : extensionJson.extensions) { + for (Extension extension : extensionJson.getExtensions()) { ArtifactRequest request = new ArtifactRequest(); - Artifact artifact = new DefaultArtifact(extension.groupId, extension.artifactId, "jar", version); + Artifact artifact = toAetherArtifact(extension.getArtifact()); request.setArtifact(artifact); requests.add(request); // record the extension for this GAV - extensionsByGav.put(extension.groupId + ":" + extension.artifactId, extension); + extensionsByGav.put(artifact.getGroupId() + ":" + artifact.getArtifactId(), extension); } // examine all the extension jars - List deploymentRequests = new ArrayList<>(extensionJson.extensions.size()); + List deploymentRequests = new ArrayList<>(extensionJson.getExtensions().size()); for (ArtifactResult result : resolver.resolve(requests)) { Artifact artifact = result.getArtifact(); // which extension was this for? @@ -130,24 +131,26 @@ public static void main(String[] args) throws BootstrapMavenException, IOExcepti // Temporary fix for https://github.com/quarkusio/quarkus/issues/5214 until we figure out how to fix it Extension openApi = extensionsByGav.get("io.quarkus:quarkus-smallrye-openapi"); - if (openApi != null) + if (openApi != null) { extensionsByConfigRoots.put("io.quarkus.smallrye.openapi.common.deployment.SmallRyeOpenApiConfig", openApi); + } // sort extensions by name, assign their config items based on their config roots for (Entry entry : extensionsByConfigRoots.entrySet()) { List items = docItemsByConfigRoots.get(entry.getKey()); if (items != null) { - String extensionName = entry.getValue().name; + String extensionName = entry.getValue().getName(); if (extensionName == null) { - String extensionGav = entry.getValue().groupId + ":" + entry.getValue().artifactId; // compute the docs file name for this extension String docFileName = DocGeneratorUtil.computeExtensionDocFileName(entry.getKey()); // now approximate an extension file name based on it extensionName = guessExtensionNameFromDocumentationFileName(docFileName); - System.err.println("WARNING: Extension name missing for " + extensionGav + " using guessed extension name: " - + extensionName); + System.err.println("WARNING: Extension name missing for " + + (entry.getValue().getArtifact().getGroupId() + ":" + + entry.getValue().getArtifact().getArtifactId()) + + " using guessed extension name: " + extensionName); } - artifactIdsByName.put(extensionName, entry.getValue().artifactId); + artifactIdsByName.put(extensionName, entry.getValue().getArtifact().getArtifactId()); List existingConfigDocItems = sortedConfigItemsByExtension.get(extensionName); if (existingConfigDocItems != null) { 
DocGeneratorUtil.appendConfigItemsIntoExistingOnes(existingConfigDocItems, items); diff --git a/docs/src/main/java/io/quarkus/docs/generation/ExtensionJson.java b/docs/src/main/java/io/quarkus/docs/generation/ExtensionJson.java deleted file mode 100644 index c81ae58911924..0000000000000 --- a/docs/src/main/java/io/quarkus/docs/generation/ExtensionJson.java +++ /dev/null @@ -1,15 +0,0 @@ -package io.quarkus.docs.generation; - -import java.util.List; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class ExtensionJson { - public List extensions; - - @JsonIgnoreProperties(ignoreUnknown = true) - public static class Extension { - public String name, groupId, artifactId; - } -} \ No newline at end of file diff --git a/docs/sync-web-site.sh b/docs/sync-web-site.sh index a246f37b7f6cf..ce6beead98300 100755 --- a/docs/sync-web-site.sh +++ b/docs/sync-web-site.sh @@ -14,7 +14,7 @@ rsync -vr --delete \ --exclude='**/index.adoc' \ --exclude='**/attributes.adoc' \ ../target/asciidoc/generated/ \ - target/web-site/_generated-config + target/web-site/_generated-config/latest echo "Sync done!" echo "==========" diff --git a/extensions/agroal/deployment/pom.xml b/extensions/agroal/deployment/pom.xml index e3e4440556a56..a31bf2739534f 100644 --- a/extensions/agroal/deployment/pom.xml +++ b/extensions/agroal/deployment/pom.xml @@ -6,7 +6,6 @@ quarkus-agroal-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 @@ -46,10 +45,7 @@ io.quarkus quarkus-smallrye-health-spi - - io.quarkus - quarkus-smallrye-metrics-spi - + io.quarkus @@ -61,6 +57,11 @@ rest-assured test + + org.assertj + assertj-core + test + io.quarkus quarkus-junit5-internal @@ -78,7 +79,7 @@ io.quarkus - quarkus-smallrye-metrics + quarkus-smallrye-metrics-deployment test diff --git a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AggregatedDataSourceBuildTimeConfigBuildItem.java b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AggregatedDataSourceBuildTimeConfigBuildItem.java index 342c9f942685e..8e5c66c27137b 100644 --- a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AggregatedDataSourceBuildTimeConfigBuildItem.java +++ b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AggregatedDataSourceBuildTimeConfigBuildItem.java @@ -1,7 +1,6 @@ package io.quarkus.agroal.deployment; import io.quarkus.agroal.runtime.DataSourceJdbcBuildTimeConfig; -import io.quarkus.agroal.runtime.LegacyDataSourceJdbcBuildTimeConfig; import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.datasource.runtime.DataSourceBuildTimeConfig; @@ -14,25 +13,19 @@ final class AggregatedDataSourceBuildTimeConfigBuildItem extends MultiBuildItem private final DataSourceJdbcBuildTimeConfig jdbcConfig; - private final LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcConfig; - - private final String resolvedDbKind; + private final String dbKind; private final String resolvedDriverClass; - private boolean legacy; - AggregatedDataSourceBuildTimeConfigBuildItem(String name, DataSourceBuildTimeConfig dataSourceConfig, DataSourceJdbcBuildTimeConfig jdbcConfig, - LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcConfig, String resolvedDbKind, - String resolvedDriverClass, boolean legacy) { + String dbKind, + String resolvedDriverClass) { this.name = name; this.dataSourceConfig = dataSourceConfig; this.jdbcConfig = jdbcConfig; - this.legacyDataSourceJdbcConfig 
= legacyDataSourceJdbcConfig; - this.resolvedDbKind = resolvedDbKind; + this.dbKind = dbKind; this.resolvedDriverClass = resolvedDriverClass; - this.legacy = legacy; } public String getName() { @@ -47,23 +40,15 @@ public DataSourceJdbcBuildTimeConfig getJdbcConfig() { return jdbcConfig; } - public LegacyDataSourceJdbcBuildTimeConfig getLegacyDataSourceJdbcConfig() { - return legacyDataSourceJdbcConfig; - } - public boolean isDefault() { return DataSourceUtil.isDefault(name); } - public String getResolvedDbKind() { - return resolvedDbKind; + public String getDbKind() { + return dbKind; } public String getResolvedDriverClass() { return resolvedDriverClass; } - - public boolean isLegacy() { - return legacy; - } } diff --git a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalMetricsProcessor.java b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalMetricsProcessor.java new file mode 100644 index 0000000000000..cc7f79e6cc31c --- /dev/null +++ b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalMetricsProcessor.java @@ -0,0 +1,33 @@ +package io.quarkus.agroal.deployment; + +import java.util.List; + +import io.quarkus.agroal.runtime.metrics.AgroalMetricsRecorder; +import io.quarkus.datasource.runtime.DataSourcesBuildTimeConfig; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.deployment.metrics.MetricsFactoryConsumerBuildItem; + +public class AgroalMetricsProcessor { + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void registerMetrics(AgroalMetricsRecorder recorder, + DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, + BuildProducer datasourceMetrics, + List aggregatedDataSourceBuildTimeConfigs) { + + for (AggregatedDataSourceBuildTimeConfigBuildItem aggregatedDataSourceBuildTimeConfig : aggregatedDataSourceBuildTimeConfigs) { + // Create a MetricsFactory consumer to register metrics for a data source + // IFF metrics are enabled globally and for the data source + // (they are enabled for each data source by default if they are also enabled globally) + if (dataSourcesBuildTimeConfig.metricsEnabled && + aggregatedDataSourceBuildTimeConfig.getJdbcConfig().enableMetrics.orElse(true)) { + datasourceMetrics.produce(new MetricsFactoryConsumerBuildItem( + recorder.registerDataSourceMetrics(aggregatedDataSourceBuildTimeConfig.getName()))); + } + } + } +} diff --git a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java index 5229cc5c98ee7..d953511a950b9 100644 --- a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java +++ b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java @@ -12,10 +12,6 @@ import javax.inject.Singleton; import javax.sql.XADataSource; -import org.eclipse.microprofile.metrics.Metadata; -import org.eclipse.microprofile.metrics.MetricType; -import org.eclipse.microprofile.metrics.MetricUnits; -import org.eclipse.microprofile.metrics.Tag; import org.jboss.jandex.DotName; import org.jboss.logging.Logger; @@ -27,16 +23,14 @@ import io.quarkus.agroal.runtime.DataSourceSupport; import io.quarkus.agroal.runtime.DataSources; import io.quarkus.agroal.runtime.DataSourcesJdbcBuildTimeConfig; -import 
io.quarkus.agroal.runtime.LegacyDataSourceJdbcBuildTimeConfig; -import io.quarkus.agroal.runtime.LegacyDataSourcesJdbcBuildTimeConfig; import io.quarkus.agroal.runtime.TransactionIntegration; -import io.quarkus.agroal.runtime.metrics.AgroalCounter; -import io.quarkus.agroal.runtime.metrics.AgroalGauge; +import io.quarkus.agroal.spi.JdbcDataSourceBuildItem; +import io.quarkus.agroal.spi.JdbcDriverBuildItem; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.arc.processor.DotNames; import io.quarkus.datasource.common.runtime.DataSourceUtil; -import io.quarkus.datasource.common.runtime.DatabaseKind; +import io.quarkus.datasource.deployment.spi.DefaultDataSourceDbKindBuildItem; import io.quarkus.datasource.runtime.DataSourceBuildTimeConfig; import io.quarkus.datasource.runtime.DataSourcesBuildTimeConfig; import io.quarkus.datasource.runtime.DataSourcesRuntimeConfig; @@ -53,9 +47,9 @@ import io.quarkus.deployment.builditem.SslNativeConfigBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem; -import io.quarkus.smallrye.metrics.deployment.spi.MetricBuildItem; @SuppressWarnings("deprecation") class AgroalProcessor { @@ -64,10 +58,6 @@ class AgroalProcessor { private static final DotName DATA_SOURCE = DotName.createSimple(javax.sql.DataSource.class.getName()); - private static final String QUARKUS_DATASOURCE_CONFIG_PREFIX = "quarkus.datasource."; - private static final String QUARKUS_DATASOURCE_DB_KIND_CONFIG_NAME = "db-kind"; - private static final String QUARKUS_DATASOURCE_DRIVER_CONFIG_NAME = "driver"; - @BuildStep void agroal(BuildProducer feature, BuildProducer capability) { @@ -79,17 +69,23 @@ void agroal(BuildProducer feature, void build( DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig, - LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig, + List defaultDbKinds, List jdbcDriverBuildItems, BuildProducer reflectiveClass, BuildProducer resource, BuildProducer sslNativeSupport, - BuildProducer aggregatedConfig) throws Exception { + BuildProducer aggregatedConfig, + CurateOutcomeBuildItem curateOutcomeBuildItem) throws Exception { + if (dataSourcesBuildTimeConfig.driver.isPresent() || dataSourcesBuildTimeConfig.url.isPresent()) { + throw new ConfigurationException( + "quarkus.datasource.url and quarkus.datasource.driver have been deprecated in Quarkus 1.3 and removed in 1.9. 
" + + "Please use the new datasource configuration as explained in https://quarkus.io/guides/datasource."); + } + List aggregatedDataSourceBuildTimeConfigs = getAggregatedConfigBuildItems( dataSourcesBuildTimeConfig, - dataSourcesJdbcBuildTimeConfig, - legacyDataSourcesJdbcBuildTimeConfig, - jdbcDriverBuildItems); + dataSourcesJdbcBuildTimeConfig, curateOutcomeBuildItem, + jdbcDriverBuildItems, defaultDbKinds); if (aggregatedDataSourceBuildTimeConfigs.isEmpty()) { log.warn("The Agroal dependency is present but no JDBC datasources have been defined."); @@ -171,9 +167,9 @@ private DataSourceSupport getDataSourceSupport( for (AggregatedDataSourceBuildTimeConfigBuildItem aggregatedDataSourceBuildTimeConfig : aggregatedBuildTimeConfigBuildItems) { String dataSourceName = aggregatedDataSourceBuildTimeConfig.getName(); dataSourceSupportEntries.put(dataSourceName, - new DataSourceSupport.Entry(dataSourceName, aggregatedDataSourceBuildTimeConfig.getResolvedDbKind(), + new DataSourceSupport.Entry(dataSourceName, aggregatedDataSourceBuildTimeConfig.getDbKind(), aggregatedDataSourceBuildTimeConfig.getResolvedDriverClass(), - aggregatedDataSourceBuildTimeConfig.isLegacy(), aggregatedDataSourceBuildTimeConfig.isDefault())); + aggregatedDataSourceBuildTimeConfig.isDefault())); } return new DataSourceSupport(sslNativeConfig.isExplicitlyDisabled(), @@ -263,21 +259,21 @@ void generateDataSourceBeans(AgroalRecorder recorder, private List getAggregatedConfigBuildItems( DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig, - LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig, - List jdbcDriverBuildItems) { + CurateOutcomeBuildItem curateOutcomeBuildItem, + List jdbcDriverBuildItems, List defaultDbKinds) { List dataSources = new ArrayList<>(); - // New configuration - if (dataSourcesBuildTimeConfig.defaultDataSource.dbKind.isPresent()) { + Optional effectiveDbKind = DefaultDataSourceDbKindBuildItem + .resolve(dataSourcesBuildTimeConfig.defaultDataSource.dbKind, defaultDbKinds, curateOutcomeBuildItem); + + if (effectiveDbKind.isPresent()) { if (dataSourcesJdbcBuildTimeConfig.jdbc.enabled) { dataSources.add(new AggregatedDataSourceBuildTimeConfigBuildItem(DataSourceUtil.DEFAULT_DATASOURCE_NAME, dataSourcesBuildTimeConfig.defaultDataSource, dataSourcesJdbcBuildTimeConfig.jdbc, - null, - dataSourcesBuildTimeConfig.defaultDataSource.dbKind.get(), - resolveDriver(null, dataSourcesBuildTimeConfig.defaultDataSource, - dataSourcesJdbcBuildTimeConfig.jdbc, jdbcDriverBuildItems), - false)); + effectiveDbKind.get(), + resolveDriver(DataSourceUtil.DEFAULT_DATASOURCE_NAME, effectiveDbKind.get(), + dataSourcesJdbcBuildTimeConfig.jdbc, jdbcDriverBuildItems))); } } for (Entry entry : dataSourcesBuildTimeConfig.namedDataSources.entrySet()) { @@ -287,102 +283,29 @@ private List getAggregatedConfigBu if (!jdbcBuildTimeConfig.enabled) { continue; } + Optional dbKind = DefaultDataSourceDbKindBuildItem + .resolve(entry.getValue().dbKind, defaultDbKinds, curateOutcomeBuildItem); + if (!dbKind.isPresent()) { + continue; + } dataSources.add(new AggregatedDataSourceBuildTimeConfigBuildItem(entry.getKey(), entry.getValue(), jdbcBuildTimeConfig, - null, - entry.getValue().dbKind.get(), - resolveDriver(entry.getKey(), entry.getValue(), jdbcBuildTimeConfig, jdbcDriverBuildItems), - false)); - } - - // Legacy configuration - if (legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource.driver.isPresent()) { - String resolvedDbKind = 
resolveLegacyKind(legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource.driver.get()); - boolean alreadyConfigured = ensureNoConfigurationClash(dataSources, DataSourceUtil.DEFAULT_DATASOURCE_NAME, - resolvedDbKind); - - if (!alreadyConfigured) { - dataSources.add(new AggregatedDataSourceBuildTimeConfigBuildItem(DataSourceUtil.DEFAULT_DATASOURCE_NAME, - dataSourcesBuildTimeConfig.defaultDataSource, - dataSourcesJdbcBuildTimeConfig.jdbc, - legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource, - resolvedDbKind, - legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource.driver.get(), - true)); - } - } - for (Entry entry : legacyDataSourcesJdbcBuildTimeConfig.namedDataSources - .entrySet()) { - String datasourceName = entry.getKey(); - DataSourceBuildTimeConfig dataSourceBuildTimeConfig = dataSourcesBuildTimeConfig.namedDataSources - .containsKey(datasourceName) ? dataSourcesBuildTimeConfig.namedDataSources.get(datasourceName) - : new DataSourceBuildTimeConfig(); - DataSourceJdbcBuildTimeConfig jdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig.namedDataSources - .containsKey(datasourceName) ? dataSourcesJdbcBuildTimeConfig.namedDataSources.get(datasourceName).jdbc - : new DataSourceJdbcBuildTimeConfig(); - - String resolvedDbKind = resolveLegacyKind(entry.getValue().driver.get()); - boolean alreadyConfigured = ensureNoConfigurationClash(dataSources, datasourceName, resolvedDbKind); - - if (!alreadyConfigured) { - dataSources.add(new AggregatedDataSourceBuildTimeConfigBuildItem(datasourceName, - dataSourceBuildTimeConfig, - jdbcBuildTimeConfig, - entry.getValue(), - resolvedDbKind, - entry.getValue().driver.get(), - true)); - } + dbKind.get(), + resolveDriver(entry.getKey(), dbKind.get(), jdbcBuildTimeConfig, jdbcDriverBuildItems))); } return dataSources; } - private boolean ensureNoConfigurationClash(List dataSources, - String datasourceName, String resolvedDbKind) { - - boolean alreadyConfigured = false; - for (AggregatedDataSourceBuildTimeConfigBuildItem alreadyConfiguredDataSource : dataSources) { - if (alreadyConfiguredDataSource.getName().equals(datasourceName)) { - if (!alreadyConfiguredDataSource.getResolvedDbKind().equals(resolvedDbKind)) { - throw new RuntimeException("Incompatible values detected between " - + quotedDataSourcePropertyName(datasourceName, QUARKUS_DATASOURCE_DB_KIND_CONFIG_NAME) + " and " - + quotedDataSourcePropertyName(datasourceName, QUARKUS_DATASOURCE_DRIVER_CONFIG_NAME) - + ". Consider removing the latter."); - } - alreadyConfigured = true; - } - } - if (alreadyConfigured) { - log.warn("Configuring " + quotedDataSourcePropertyName(datasourceName, QUARKUS_DATASOURCE_DRIVER_CONFIG_NAME) - + " is redundant when " - + quotedDataSourcePropertyName(datasourceName, QUARKUS_DATASOURCE_DB_KIND_CONFIG_NAME) - + " is also configured"); - } - - return alreadyConfigured; - } - - private String quotedDataSourcePropertyName(String datasourceName, String propertyName) { - return "\"" + dataSourcePropertyName(datasourceName, propertyName) + "\""; - } - - private String dataSourcePropertyName(String datasourceName, String propertyName) { - if (DataSourceUtil.DEFAULT_DATASOURCE_NAME.equals(datasourceName)) { - return QUARKUS_DATASOURCE_CONFIG_PREFIX + propertyName; - } - return QUARKUS_DATASOURCE_CONFIG_PREFIX + datasourceName + "." 
+ propertyName; - } - - private String resolveDriver(String dataSourceName, DataSourceBuildTimeConfig dataSourceBuildTimeConfig, + private String resolveDriver(String dataSourceName, String dbKind, DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, List jdbcDriverBuildItems) { if (dataSourceJdbcBuildTimeConfig.driver.isPresent()) { return dataSourceJdbcBuildTimeConfig.driver.get(); } Optional matchingJdbcDriver = jdbcDriverBuildItems.stream() - .filter(i -> dataSourceBuildTimeConfig.dbKind.get().equals(i.getDbKind())) + .filter(i -> dbKind.equals(i.getDbKind())) .findFirst(); if (matchingJdbcDriver.isPresent()) { @@ -395,33 +318,14 @@ private String resolveDriver(String dataSourceName, DataSourceBuildTimeConfig da } } - throw new ConfigurationException("Unable to determine the driver for " + (dataSourceName == null ? "default datasource" - : "datasource named " + dataSourceName)); - } - - private String resolveLegacyKind(String driver) { - switch (driver) { - case "org.apache.derby.jdbc.ClientDriver": - case "org.apache.derby.jdbc.ClientXADataSource": - return DatabaseKind.DERBY; - case "org.h2.Driver": - case "org.h2.jdbcx.JdbcDataSource": - return DatabaseKind.H2; - case "org.mariadb.jdbc.Driver": - case "org.mariadb.jdbc.MySQLDataSource": - return DatabaseKind.MARIADB; - case "com.microsoft.sqlserver.jdbc.SQLServerDriver": - case "com.microsoft.sqlserver.jdbc.SQLServerXADataSource": - return DatabaseKind.MSSQL; - case "com.mysql.cj.jdbc.Driver": - case "com.mysql.cj.jdbc.MysqlXADataSource": - return DatabaseKind.MYSQL; - case "org.postgresql.Driver": - case "org.postgresql.xa.PGXADataSource": - return DatabaseKind.POSTGRESQL; - } - - return "other-legacy"; + throw new ConfigurationException("Unable to find a JDBC driver corresponding to the database kind '" + + dbKind + "' for the " + + (DataSourceUtil.isDefault(dataSourceName) ? "default datasource" + : "datasource '" + dataSourceName + "'") + + ". Either provide a suitable JDBC driver extension, define the driver manually, or disable the JDBC datasource by adding " + + (DataSourceUtil.isDefault(dataSourceName) ? "'quarkus.datasource.jdbc=false'" + : "'quarkus.datasource." + dataSourceName + ".jdbc=false'") + + " to your configuration if you don't need it."); } @BuildStep @@ -429,197 +333,4 @@ HealthBuildItem addHealthCheck(DataSourcesBuildTimeConfig dataSourcesBuildTimeCo return new HealthBuildItem("io.quarkus.agroal.runtime.health.DataSourceHealthCheck", dataSourcesBuildTimeConfig.healthEnabled); } - - @BuildStep - void registerMetrics( - DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, - List aggregatedDataSourceBuildTimeConfigs, - BuildProducer metrics) { - Metadata activeCountMetadata = Metadata.builder() - .withName("agroal.active.count") - .withDescription("Number of active connections. 
These connections are in use and not available to be acquired.") - .withType(MetricType.GAUGE) - .build(); - Metadata availableCountMetadata = Metadata.builder() - .withName("agroal.available.count") - .withDescription("Number of idle connections in the pool, available to be acquired.") - .withType(MetricType.GAUGE) - .build(); - Metadata maxUsedCountMetadata = Metadata.builder() - .withName("agroal.max.used.count") - .withDescription("Maximum number of connections active simultaneously.") - .withType(MetricType.GAUGE) - .build(); - Metadata awaitingCountMetadata = Metadata.builder() - .withName("agroal.awaiting.count") - .withDescription("Approximate number of threads blocked, waiting to acquire a connection.") - .withType(MetricType.GAUGE) - .build(); - Metadata blockingTimeAverageMetadata = Metadata.builder() - .withName("agroal.blocking.time.average") - .withDescription("Average time an application waited to acquire a connection.") - .withUnit(MetricUnits.MILLISECONDS) - .withType(MetricType.GAUGE) - .build(); - Metadata blockingTimeMaxMetadata = Metadata.builder() - .withName("agroal.blocking.time.max") - .withDescription("Maximum time an application waited to acquire a connection.") - .withUnit(MetricUnits.MILLISECONDS) - .withType(MetricType.GAUGE) - .build(); - Metadata blockingTimeTotalMetadata = Metadata.builder() - .withName("agroal.blocking.time.total") - .withDescription("Total time applications waited to acquire a connection.") - .withUnit(MetricUnits.MILLISECONDS) - .withType(MetricType.GAUGE) - .build(); - Metadata creationTimeAverageMetadata = Metadata.builder() - .withName("agroal.creation.time.average") - .withDescription("Average time for a connection to be created.") - .withUnit(MetricUnits.MILLISECONDS) - .withType(MetricType.GAUGE) - .build(); - Metadata creationTimeMaxMetadata = Metadata.builder() - .withName("agroal.creation.time.max") - .withDescription("Maximum time for a connection to be created.") - .withUnit(MetricUnits.MILLISECONDS) - .withType(MetricType.GAUGE) - .build(); - Metadata creationTimeTotalMetadata = Metadata.builder() - .withName("agroal.creation.time.total") - .withDescription("Total time waiting for connections to be created.") - .withUnit(MetricUnits.MILLISECONDS) - .withType(MetricType.GAUGE) - .build(); - Metadata acquireCountMetadata = Metadata.builder() - .withName("agroal.acquire.count") - .withDescription("Number of times an acquire operation succeeded.") - .withType(MetricType.COUNTER) - .build(); - Metadata creationCountMetadata = Metadata.builder() - .withName("agroal.creation.count") - .withDescription("Number of created connections.") - .withType(MetricType.COUNTER) - .build(); - Metadata leakDetectionCountMetadata = Metadata.builder() - .withName("agroal.leak.detection.count") - .withDescription("Number of times a leak was detected. 
A single connection can be detected multiple times.") - .withType(MetricType.COUNTER) - .build(); - Metadata destroyCountMetadata = Metadata.builder() - .withName("agroal.destroy.count") - .withDescription("Number of destroyed connections.") - .withType(MetricType.COUNTER) - .build(); - Metadata flushCountMetadata = Metadata.builder() - .withName("agroal.flush.count") - .withDescription("Number of connections removed from the pool, not counting invalid / idle.") - .withType(MetricType.COUNTER) - .build(); - Metadata invalidCountMetadata = Metadata.builder() - .withName("agroal.invalid.count") - .withDescription("Number of connections removed from the pool for being invalid.") - .withType(MetricType.COUNTER) - .build(); - Metadata reapCountMetadata = Metadata.builder() - .withName("agroal.reap.count") - .withDescription("Number of connections removed from the pool for being idle.") - .withType(MetricType.COUNTER) - .build(); - - for (AggregatedDataSourceBuildTimeConfigBuildItem aggregatedDataSourceBuildTimeConfig : aggregatedDataSourceBuildTimeConfigs) { - String dataSourceName = aggregatedDataSourceBuildTimeConfig.getName(); - // expose metrics for this datasource if metrics are enabled both globally and for this data source - // (they are enabled for each data source by default if they are also enabled globally) - boolean metricsEnabledForThisDatasource = dataSourcesBuildTimeConfig.metricsEnabled && - aggregatedDataSourceBuildTimeConfig.getJdbcConfig().enableMetrics.orElse(true); - Tag tag = new Tag("datasource", DataSourceUtil.isDefault(dataSourceName) ? "default" : dataSourceName); - String configRootName = "datasource"; - metrics.produce(new MetricBuildItem(activeCountMetadata, - new AgroalGauge(dataSourceName, "activeCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(maxUsedCountMetadata, - new AgroalGauge(dataSourceName, "maxUsedCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(awaitingCountMetadata, - new AgroalGauge(dataSourceName, "awaitingCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(availableCountMetadata, - new AgroalGauge(dataSourceName, "availableCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(blockingTimeAverageMetadata, - new AgroalGauge(dataSourceName, "blockingTimeAverage"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(blockingTimeMaxMetadata, - new AgroalGauge(dataSourceName, "blockingTimeMax"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(blockingTimeTotalMetadata, - new AgroalGauge(dataSourceName, "blockingTimeTotal"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(creationTimeAverageMetadata, - new AgroalGauge(dataSourceName, "creationTimeAverage"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(creationTimeMaxMetadata, - new AgroalGauge(dataSourceName, "creationTimeMax"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(creationTimeTotalMetadata, - new AgroalGauge(dataSourceName, "creationTimeTotal"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(acquireCountMetadata, - new AgroalCounter(dataSourceName, 
"acquireCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(creationCountMetadata, - new AgroalCounter(dataSourceName, "creationCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(leakDetectionCountMetadata, - new AgroalCounter(dataSourceName, "leakDetectionCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(destroyCountMetadata, - new AgroalCounter(dataSourceName, "destroyCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(flushCountMetadata, - new AgroalCounter(dataSourceName, "flushCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(invalidCountMetadata, - new AgroalCounter(dataSourceName, "invalidCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - metrics.produce(new MetricBuildItem(reapCountMetadata, - new AgroalCounter(dataSourceName, "reapCount"), - metricsEnabledForThisDatasource, - configRootName, - tag)); - } - } } diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/AgroalMetricsTestCase.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/AgroalMetricsTestCase.java index 95d46dc3230be..bff4db9e227e6 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/AgroalMetricsTestCase.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/AgroalMetricsTestCase.java @@ -39,10 +39,11 @@ public class AgroalMetricsTestCase { @Test public void testMetricsOfDefaultDS() throws SQLException { - Counter acquireCount = registry.getCounters().get(new MetricID("agroal.acquire.count", - new Tag("datasource", "default"))); - Gauge maxUsed = registry.getGauges().get(new MetricID("agroal.max.used.count", - new Tag("datasource", "default"))); + Counter acquireCount = registry.getCounters() + .get(new MetricID("agroal.acquire.count", new Tag("datasource", "default"))); + Gauge maxUsed = registry.getGauges() + .get(new MetricID("agroal.max.used.count", new Tag("datasource", "default"))); + Assertions.assertNotNull(acquireCount, "Agroal metrics should be registered eagerly"); Assertions.assertNotNull(maxUsed, "Agroal metrics should be registered eagerly"); diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/CompatibleKindAndDriverDataSourceConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/CompatibleKindAndDriverDataSourceConfigTest.java deleted file mode 100644 index 85794a696b2c4..0000000000000 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/CompatibleKindAndDriverDataSourceConfigTest.java +++ /dev/null @@ -1,24 +0,0 @@ -package io.quarkus.agroal.test; - -import javax.inject.Inject; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.agroal.api.AgroalDataSource; -import io.quarkus.test.QuarkusUnitTest; - -public class CompatibleKindAndDriverDataSourceConfigTest { - - @Inject - AgroalDataSource defaultDataSource; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withConfigurationResource("compatible-kind-and-driver-datasource.properties"); - - @Test - public void testApplicationStarts() { - - } -} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DefaultDataSourceConfigTest.java 
b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DefaultDataSourceConfigTest.java index ad3495062885c..6783d51864248 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DefaultDataSourceConfigTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DefaultDataSourceConfigTest.java @@ -1,9 +1,11 @@ package io.quarkus.agroal.test; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.entry; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; -import java.sql.Connection; import java.sql.SQLException; import java.time.Duration; @@ -38,7 +40,7 @@ public void testDefaultDataSourceInjection() throws SQLException { private static void testDataSource(AgroalDataSource dataSource, String username, int minSize, int maxSize, int initialSize, Duration backgroundValidationInterval, Duration acquisitionTimeout, Duration leakDetectionInterval, - Duration idleRemovalInterval, Duration maxLifetime, String newConnectionSql) throws SQLException { + Duration idleRemovalInterval, Duration maxLifetime, String newConnectionSql) { AgroalConnectionPoolConfiguration configuration = dataSource.getConfiguration().connectionPoolConfiguration(); AgroalConnectionFactoryConfiguration agroalConnectionFactoryConfiguration = configuration .connectionFactoryConfiguration(); @@ -59,7 +61,10 @@ private static void testDataSource(AgroalDataSource dataSource, String username, assertTrue(agroalConnectionFactoryConfiguration.trackJdbcResources()); assertTrue(dataSource.getConfiguration().metricsEnabled()); assertEquals(newConnectionSql, agroalConnectionFactoryConfiguration.initialSql()); - try (Connection connection = dataSource.getConnection()) { - } + assertThat(agroalConnectionFactoryConfiguration.jdbcProperties()) + .contains( + entry("extraProperty1", "extraProperty1Value"), + entry("extraProperty2", "extraProperty2Value")); + assertThatCode(() -> dataSource.getConnection().close()).doesNotThrowAnyException(); } } diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DeprecatedDataSourceConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DeprecatedDataSourceConfigTest.java new file mode 100644 index 0000000000000..36d73a1bf2c6b --- /dev/null +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DeprecatedDataSourceConfigTest.java @@ -0,0 +1,29 @@ +package io.quarkus.agroal.test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class DeprecatedDataSourceConfigTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withConfigurationResource("application-deprecated-config.properties") + .assertException(e -> { + assertThat(e).isInstanceOf(ConfigurationException.class); + assertThat(e) + .hasMessageStartingWith("quarkus.datasource.url and quarkus.datasource.driver have been deprecated"); + }); + + @Test + public void deprecatedConfigThrowsException() { + // Should not be reached: verify + assertTrue(false); + } + +} diff --git 
a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DevModeTestEndpoint.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DevModeTestEndpoint.java index 2152c6ba1e24e..d8a641e6299a1 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DevModeTestEndpoint.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/DevModeTestEndpoint.java @@ -17,10 +17,10 @@ public class DevModeTestEndpoint { @GET - @Path("/{dataSourceName}/{jdbcUrl}/{username}/{minSize}/{maxSize}") + @Path("/{dataSourceName}/{jdbcUrl}/{username}/{maxSize}") public String test(@PathParam("dataSourceName") String dataSourceName, @PathParam("jdbcUrl") String jdbcUrl, @PathParam("username") String username, - @PathParam("minSize") int minSize, @PathParam("maxSize") int maxSize) throws Exception { + @PathParam("maxSize") int maxSize) throws Exception { AgroalDataSource ds; if (dataSourceName.equals("default")) { ds = CDI.current().select(AgroalDataSource.class) @@ -29,7 +29,7 @@ public String test(@PathParam("dataSourceName") String dataSourceName, @PathPara ds = CDI.current().select(AgroalDataSource.class, new DataSource.DataSourceLiteral(dataSourceName)) .get(); } - testDataSource(dataSourceName, ds, URLDecoder.decode(jdbcUrl, StandardCharsets.UTF_8.name()), username, minSize, + testDataSource(dataSourceName, ds, URLDecoder.decode(jdbcUrl, StandardCharsets.UTF_8.name()), username, maxSize); return "ok"; } diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/IncompatibleKindAndDriverDataSourceConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/IncompatibleKindAndDriverDataSourceConfigTest.java deleted file mode 100644 index 6e31c46d9b126..0000000000000 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/IncompatibleKindAndDriverDataSourceConfigTest.java +++ /dev/null @@ -1,21 +0,0 @@ -package io.quarkus.agroal.test; - -import static org.junit.jupiter.api.Assertions.fail; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.test.QuarkusUnitTest; - -public class IncompatibleKindAndDriverDataSourceConfigTest { - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withConfigurationResource("incompatible-kind-and-driver-datasource.properties") - .setExpectedException(RuntimeException.class); - - @Test - public void testApplicationShouldNotStart() { - fail("Application should not start when db-kind and driver are incompatible"); - } -} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/IncompatibleKindAndDriverMultipleDataSourceConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/IncompatibleKindAndDriverMultipleDataSourceConfigTest.java deleted file mode 100644 index c47a4342e256e..0000000000000 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/IncompatibleKindAndDriverMultipleDataSourceConfigTest.java +++ /dev/null @@ -1,21 +0,0 @@ -package io.quarkus.agroal.test; - -import static org.junit.jupiter.api.Assertions.fail; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.test.QuarkusUnitTest; - -public class IncompatibleKindAndDriverMultipleDataSourceConfigTest { - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withConfigurationResource("incompatible-multiple-datasources.properties") - 
.setExpectedException(RuntimeException.class); - - @Test - public void testApplicationShouldNotStart() { - fail("Application should not start when db-kind and driver are incompatible"); - } -} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/LegacyDefaultDataSourceConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/LegacyDefaultDataSourceConfigTest.java deleted file mode 100644 index 8367cd633917b..0000000000000 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/LegacyDefaultDataSourceConfigTest.java +++ /dev/null @@ -1,65 +0,0 @@ -package io.quarkus.agroal.test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.sql.Connection; -import java.sql.SQLException; -import java.time.Duration; - -import javax.inject.Inject; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.agroal.api.AgroalDataSource; -import io.agroal.api.configuration.AgroalConnectionFactoryConfiguration; -import io.agroal.api.configuration.AgroalConnectionPoolConfiguration; -import io.agroal.narayana.NarayanaTransactionIntegration; -import io.quarkus.test.QuarkusUnitTest; - -public class LegacyDefaultDataSourceConfigTest { - - //tag::injection[] - @Inject - AgroalDataSource defaultDataSource; - //end::injection[] - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withConfigurationResource("legacy-application-default-datasource.properties"); - - @Test - public void testDefaultDataSourceInjection() throws SQLException { - testDataSource(defaultDataSource, "username-default", 3, 13, 7, Duration.ofSeconds(53), Duration.ofSeconds(54), - Duration.ofSeconds(55), Duration.ofSeconds(56), Duration.ofSeconds(57), - "create schema if not exists schema_default"); - } - - private static void testDataSource(AgroalDataSource dataSource, String username, int minSize, int maxSize, - int initialSize, Duration backgroundValidationInterval, Duration acquisitionTimeout, Duration leakDetectionInterval, - Duration idleRemovalInterval, Duration maxLifetime, String newConnectionSql) throws SQLException { - AgroalConnectionPoolConfiguration configuration = dataSource.getConfiguration().connectionPoolConfiguration(); - AgroalConnectionFactoryConfiguration agroalConnectionFactoryConfiguration = configuration - .connectionFactoryConfiguration(); - - assertEquals("jdbc:h2:tcp://localhost/mem:default", agroalConnectionFactoryConfiguration.jdbcUrl()); - assertEquals(username, agroalConnectionFactoryConfiguration.principal().getName()); - assertEquals(minSize, configuration.minSize()); - assertEquals(maxSize, configuration.maxSize()); - assertEquals(initialSize, configuration.initialSize()); - assertEquals(backgroundValidationInterval, configuration.validationTimeout()); - assertEquals(acquisitionTimeout, configuration.acquisitionTimeout()); - assertEquals(leakDetectionInterval, configuration.leakTimeout()); - assertEquals(idleRemovalInterval, configuration.reapTimeout()); - assertEquals(maxLifetime, configuration.maxLifetime()); - assertTrue(configuration.transactionIntegration() instanceof NarayanaTransactionIntegration); - assertEquals(AgroalConnectionFactoryConfiguration.TransactionIsolation.SERIALIZABLE, - agroalConnectionFactoryConfiguration.jdbcTransactionIsolation()); - assertTrue(agroalConnectionFactoryConfiguration.trackJdbcResources()); - 
assertTrue(dataSource.getConfiguration().metricsEnabled()); - assertEquals(newConnectionSql, agroalConnectionFactoryConfiguration.initialSql()); - try (Connection connection = dataSource.getConnection()) { - } - } -} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/LegacyMultipleDataSourcesConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/LegacyMultipleDataSourcesConfigTest.java deleted file mode 100644 index 2510247a34e26..0000000000000 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/LegacyMultipleDataSourcesConfigTest.java +++ /dev/null @@ -1,64 +0,0 @@ -package io.quarkus.agroal.test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - -import java.sql.Connection; -import java.sql.SQLException; - -import javax.inject.Inject; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.agroal.api.AgroalDataSource; -import io.agroal.api.configuration.AgroalConnectionPoolConfiguration; -import io.quarkus.agroal.DataSource; -import io.quarkus.test.QuarkusUnitTest; - -public class LegacyMultipleDataSourcesConfigTest { - - //tag::injection[] - @Inject - AgroalDataSource defaultDataSource; - - @Inject - @DataSource("users") - AgroalDataSource dataSource1; - - @Inject - @DataSource("inventory") - AgroalDataSource dataSource2; - //end::injection[] - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .withConfigurationResource("legacy-application-multiple-datasources.properties"); - - @Test - public void testDataSourceInjection() throws SQLException { - testDataSource("default", defaultDataSource, "jdbc:h2:tcp://localhost/mem:default", "username-default", 3, 13); - testDataSource("users", dataSource1, "jdbc:h2:tcp://localhost/mem:users", "username1", 1, 11); - testDataSource("inventory", dataSource2, "jdbc:h2:tcp://localhost/mem:inventory", "username2", 2, 12); - } - - private static void testDataSource(String dataSourceName, AgroalDataSource dataSource, String jdbcUrl, String username, - int minSize, int maxSize) - throws SQLException { - AgroalConnectionPoolConfiguration configuration = null; - - try { - configuration = dataSource.getConfiguration().connectionPoolConfiguration(); - } catch (NullPointerException e) { - // we catch the NPE here as we have a proxy and we can't test dataSource directly - fail("Datasource " + dataSourceName + " should not be null"); - } - assertEquals(jdbcUrl, configuration.connectionFactoryConfiguration().jdbcUrl()); - assertEquals(username, configuration.connectionFactoryConfiguration().principal().getName()); - assertEquals(minSize, configuration.minSize()); - assertEquals(maxSize, configuration.maxSize()); - - try (Connection connection = dataSource.getConnection()) { - } - } -} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigDevModeTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigDevModeTest.java index da422852e1319..53618b3a638c5 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigDevModeTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigDevModeTest.java @@ -41,7 +41,7 @@ static void runTest(String dataSourceName, String jdbcUrl, String username, int minSize, int maxSize) throws UnsupportedEncodingException { 
RestAssured .get("/test/" + dataSourceName + "/" + URLEncoder.encode(jdbcUrl, StandardCharsets.UTF_8.name()) + "/" - + username + "/" + minSize + "/" + maxSize) + + username + "/" + maxSize) .then() .statusCode(200).body(Matchers.equalTo("ok")); } diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigTest.java index f04c48499bd84..cfbb502ae9816 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesConfigTest.java @@ -38,8 +38,8 @@ public class MultipleDataSourcesConfigTest { @Test public void testDataSourceInjection() throws SQLException { - testDataSource("default", defaultDataSource, "jdbc:h2:tcp://localhost/mem:default", "username-default", 3, 13); - testDataSource("users", dataSource1, "jdbc:h2:tcp://localhost/mem:users", "username1", 1, 11); - testDataSource("inventory", dataSource2, "jdbc:h2:tcp://localhost/mem:inventory", "username2", 2, 12); + testDataSource("default", defaultDataSource, "jdbc:h2:tcp://localhost/mem:default", "username-default", 13); + testDataSource("users", dataSource1, "jdbc:h2:tcp://localhost/mem:users", "username1", 11); + testDataSource("inventory", dataSource2, "jdbc:h2:tcp://localhost/mem:inventory", "username2", 12); } } diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesErroneousButWorkingConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesErroneousButWorkingConfigTest.java index 22bcff740a32d..cd68fa18cac32 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesErroneousButWorkingConfigTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesErroneousButWorkingConfigTest.java @@ -36,9 +36,9 @@ public class MultipleDataSourcesErroneousButWorkingConfigTest { @Test public void testDataSourceInjection() throws SQLException { - testDataSource("default", defaultDataSource, "jdbc:h2:tcp://localhost/mem:default", "username-default", 3, 13); - testDataSource("users", dataSource1, "jdbc:h2:tcp://localhost/mem:users", "username1", 1, 11); - testDataSource("inventory", dataSource2, "jdbc:h2:tcp://localhost/mem:inventory", "username2", 2, 12); + testDataSource("default", defaultDataSource, "jdbc:h2:tcp://localhost/mem:default", "username-default", 13); + testDataSource("users", dataSource1, "jdbc:h2:tcp://localhost/mem:users", "username1", 11); + testDataSource("inventory", dataSource2, "jdbc:h2:tcp://localhost/mem:inventory", "username2", 12); } } diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesTestUtil.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesTestUtil.java index b30233373afc6..080b09afbc07c 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesTestUtil.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesTestUtil.java @@ -15,7 +15,7 @@ private MultipleDataSourcesTestUtil() { } static void testDataSource(String dataSourceName, AgroalDataSource dataSource, String jdbcUrl, String username, - int minSize, int maxSize) + int maxSize) throws SQLException { AgroalConnectionPoolConfiguration configuration = 
null; @@ -27,7 +27,6 @@ static void testDataSource(String dataSourceName, AgroalDataSource dataSource, S } assertEquals(jdbcUrl, configuration.connectionFactoryConfiguration().jdbcUrl()); assertEquals(username, configuration.connectionFactoryConfiguration().principal().getName()); - assertEquals(minSize, configuration.minSize()); assertEquals(maxSize, configuration.maxSize()); try (Connection connection = dataSource.getConnection()) { diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDevServicesDataSourcesConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDevServicesDataSourcesConfigTest.java new file mode 100644 index 0000000000000..19fec44312d04 --- /dev/null +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDevServicesDataSourcesConfigTest.java @@ -0,0 +1,57 @@ +package io.quarkus.agroal.test; + +import static io.quarkus.agroal.test.MultipleDataSourcesTestUtil.testDataSource; + +import java.sql.SQLException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.agroal.api.AgroalDataSource; +import io.quarkus.agroal.DataSource; +import io.quarkus.test.QuarkusUnitTest; + +public class MultipleDevServicesDataSourcesConfigTest { + + //tag::injection[] + @Inject + AgroalDataSource defaultDataSource; + + @Inject + @DataSource("users") + AgroalDataSource dataSource1; + + @Inject + @DataSource("inventory") + AgroalDataSource dataSource2; + //end::injection[] + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClass(MultipleDataSourcesTestUtil.class)) + .withConfigurationResource("application-multiple-devservices-datasources.properties"); + + @Test + public void testDataSourceInjection() throws SQLException { + testDataSource("default", defaultDataSource, + "jdbc:h2:tcp://localhost:" + extractPort(defaultDataSource) + "/mem:default;DB_CLOSE_DELAY=-1", "sa", 20); + testDataSource("users", dataSource1, + "jdbc:h2:tcp://localhost:" + extractPort(dataSource1) + "/mem:users;DB_CLOSE_DELAY=-1", "sa", 20); + testDataSource("inventory", dataSource2, + "jdbc:h2:tcp://localhost:" + extractPort(dataSource2) + "/mem:inventory;DB_CLOSE_DELAY=-1", "sa", 20); + } + + public int extractPort(AgroalDataSource ds) { + String url = ds.getConfiguration().connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl(); + Matcher matcher = Pattern.compile("jdbc:h2:tcp://localhost:(\\d+)/").matcher(url); + matcher.find(); + return Integer.parseInt(matcher.group(1)); + } +} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NamedDataSourceConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NamedDataSourceConfigTest.java index 4e4515e80247b..0fd72eeccf9be 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NamedDataSourceConfigTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NamedDataSourceConfigTest.java @@ -45,7 +45,6 @@ private static void dataSourceAssert(String dataSourceName, AgroalDataSource dat } assertEquals(jdbcUrl, configuration.connectionFactoryConfiguration().jdbcUrl()); assertEquals(username, 
configuration.connectionFactoryConfiguration().principal().getName()); - assertEquals(minSize, configuration.minSize()); assertEquals(maxSize, configuration.maxSize()); assertFalse(dataSource.getConfiguration().metricsEnabled()); // metrics not enabled by default diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoSuitableDriverTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoSuitableDriverTest.java new file mode 100644 index 0000000000000..da7d8f6072823 --- /dev/null +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/NoSuitableDriverTest.java @@ -0,0 +1,29 @@ +package io.quarkus.agroal.test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class NoSuitableDriverTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withConfigurationResource("application-no-suitable-driver.properties") + .assertException(t -> { + assertEquals(ConfigurationException.class, t.getClass()); + assertThat(t).hasMessageStartingWith("Unable to find a JDBC driver corresponding to the database kind"); + }); + + @Test + public void noSuitableDriver() { + // Should not be reached: verify + assertTrue(false); + } + +} diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/PoolLessTestCase.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/PoolLessTestCase.java index 1cbb9e5042204..20a8b2dd85e68 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/PoolLessTestCase.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/PoolLessTestCase.java @@ -31,8 +31,8 @@ public void testConnectionDisposal() throws SQLException { } } - // Assert there are no connections on the pool - Assertions.assertEquals(0L, defaultDS.getMetrics().availableCount()); + // Assert there are no connections in the pool + Assertions.assertEquals(0L, defaultDS.getMetrics().activeCount()); Assertions.assertEquals(1L, defaultDS.getMetrics().creationCount()); Assertions.assertEquals(1L, defaultDS.getMetrics().acquireCount()); diff --git a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/UnknownDriverConfigTest.java b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/UnknownDriverConfigTest.java index f3b5d4665ab5b..b5a5e602e85b5 100644 --- a/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/UnknownDriverConfigTest.java +++ b/extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/UnknownDriverConfigTest.java @@ -43,7 +43,6 @@ private static void testDataSource(AgroalDataSource dataSource, String username, assertEquals("jdbc:h2:tcp://localhost/mem:default", agroalConnectionFactoryConfiguration.jdbcUrl()); assertEquals(username, agroalConnectionFactoryConfiguration.principal().getName()); - assertEquals(minSize, configuration.minSize()); assertEquals(maxSize, configuration.maxSize()); assertEquals(initialSize, configuration.initialSize()); assertEquals(backgroundValidationInterval, configuration.validationTimeout()); diff --git a/extensions/agroal/deployment/src/test/resources/application-default-datasource-unknown-driver.properties 
b/extensions/agroal/deployment/src/test/resources/application-default-datasource-unknown-driver.properties index c053b993880f6..c736df20c1c69 100644 --- a/extensions/agroal/deployment/src/test/resources/application-default-datasource-unknown-driver.properties +++ b/extensions/agroal/deployment/src/test/resources/application-default-datasource-unknown-driver.properties @@ -3,11 +3,10 @@ quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default quarkus.datasource.jdbc.driver=org.h2.Driver -quarkus.datasource.jdbc.min-size=3 quarkus.datasource.jdbc.max-size=13 +quarkus.datasource.jdbc.initial-size=7 quarkus.datasource.jdbc.enable-metrics=true -quarkus.datasource.jdbc.initial-size=7 quarkus.datasource.jdbc.background-validation-interval=53 quarkus.datasource.jdbc.acquisition-timeout=54 quarkus.datasource.jdbc.leak-detection-interval=55 diff --git a/extensions/agroal/deployment/src/test/resources/application-default-datasource.properties b/extensions/agroal/deployment/src/test/resources/application-default-datasource.properties index 81ba3f917e392..83ff238367387 100644 --- a/extensions/agroal/deployment/src/test/resources/application-default-datasource.properties +++ b/extensions/agroal/deployment/src/test/resources/application-default-datasource.properties @@ -1,17 +1,17 @@ -#tag::basic[] quarkus.datasource.db-kind=h2 quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default quarkus.datasource.jdbc.min-size=3 +quarkus.datasource.jdbc.initial-size=7 quarkus.datasource.jdbc.max-size=13 quarkus.datasource.jdbc.enable-metrics=true -#end::basic[] -quarkus.datasource.jdbc.initial-size=7 quarkus.datasource.jdbc.background-validation-interval=53 quarkus.datasource.jdbc.acquisition-timeout=54 quarkus.datasource.jdbc.leak-detection-interval=55 quarkus.datasource.jdbc.idle-removal-interval=56 quarkus.datasource.jdbc.max-lifetime=57 quarkus.datasource.jdbc.transaction-isolation-level=serializable -quarkus.datasource.jdbc.new-connection-sql=create schema if not exists schema_default \ No newline at end of file +quarkus.datasource.jdbc.new-connection-sql=create schema if not exists schema_default +quarkus.datasource.jdbc.additional-jdbc-properties.extraProperty1=extraProperty1Value +quarkus.datasource.jdbc.additional-jdbc-properties.extraProperty2=extraProperty2Value \ No newline at end of file diff --git a/extensions/agroal/deployment/src/test/resources/application-deprecated-config.properties b/extensions/agroal/deployment/src/test/resources/application-deprecated-config.properties new file mode 100644 index 0000000000000..fed3e2f0a4108 --- /dev/null +++ b/extensions/agroal/deployment/src/test/resources/application-deprecated-config.properties @@ -0,0 +1,4 @@ +quarkus.datasource.driver=org.h2.Driver +quarkus.datasource.username=username-default +quarkus.datasource.url=jdbc:h2:tcp://localhost/mem:default +quarkus.datasource.max-size=13 diff --git a/extensions/agroal/deployment/src/test/resources/application-metrics-enabled.properties b/extensions/agroal/deployment/src/test/resources/application-metrics-enabled.properties index 0c0b871f538ae..5729cf306aa86 100644 --- a/extensions/agroal/deployment/src/test/resources/application-metrics-enabled.properties +++ b/extensions/agroal/deployment/src/test/resources/application-metrics-enabled.properties @@ -1,17 +1,15 @@ quarkus.datasource.metrics.enabled=true # default ds -quarkus.datasource.enable-metrics=true -quarkus.datasource.url=jdbc:h2:mem:defaultdb 
-quarkus.datasource.driver=org.h2.Driver +quarkus.datasource.db-kind=h2 quarkus.datasource.username=username1 -quarkus.datasource.min-size=3 -quarkus.datasource.max-size=13 +quarkus.datasource.jdbc.url=jdbc:h2:mem:defaultdb +quarkus.datasource.jdbc.max-size=13 +quarkus.datasource.jdbc.enable-metrics=true # another ds -quarkus.datasource.ds1.enable-metrics=true -quarkus.datasource.ds1.url=jdbc:h2:mem:db1 -quarkus.datasource.ds1.driver=org.h2.Driver +quarkus.datasource.ds1.db-kind=h2 quarkus.datasource.ds1.username=username1 -quarkus.datasource.ds1.min-size=3 -quarkus.datasource.ds1.max-size=13 \ No newline at end of file +quarkus.datasource.ds1.jdbc.url=jdbc:h2:mem:db1 +quarkus.datasource.ds1.jdbc.max-size=13 +quarkus.datasource.ds1.jdbc.enable-metrics=true \ No newline at end of file diff --git a/extensions/agroal/deployment/src/test/resources/application-multiple-datasources-erroneous-but-working.properties b/extensions/agroal/deployment/src/test/resources/application-multiple-datasources-erroneous-but-working.properties index ba211b4d09300..b0b0dd55dd85f 100644 --- a/extensions/agroal/deployment/src/test/resources/application-multiple-datasources-erroneous-but-working.properties +++ b/extensions/agroal/deployment/src/test/resources/application-multiple-datasources-erroneous-but-working.properties @@ -1,19 +1,16 @@ quarkus.datasource.db-kind=h2 quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 quarkus.datasource.jdbc.max-size=13 quarkus.datasource.users.db-kind=h2 quarkus.datasource.users.username=username1 quarkus.datasource.users.driver=org.h2.Driver quarkus.datasource.users.jdbc.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.jdbc.min-size=1 quarkus.datasource.users.jdbc.max-size=11 quarkus.datasource.inventory.db-kind=h2 quarkus.datasource.inventory.username=username2 quarkus.datasource.inventory.jdbc.driver=org.h2.Driver quarkus.datasource.inventory.jdbc.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.jdbc.min-size=2 quarkus.datasource.inventory.jdbc.max-size=12 diff --git a/extensions/agroal/deployment/src/test/resources/application-multiple-datasources.properties b/extensions/agroal/deployment/src/test/resources/application-multiple-datasources.properties index fdcdde17599fc..a56b071e0abcf 100644 --- a/extensions/agroal/deployment/src/test/resources/application-multiple-datasources.properties +++ b/extensions/agroal/deployment/src/test/resources/application-multiple-datasources.properties @@ -1,19 +1,16 @@ quarkus.datasource.db-kind=h2 quarkus.datasource.username=username-default quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 quarkus.datasource.jdbc.max-size=13 quarkus.datasource.users.db-kind=h2 quarkus.datasource.users.username=username1 quarkus.datasource.users.jdbc.driver=org.h2.Driver quarkus.datasource.users.jdbc.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.jdbc.min-size=1 quarkus.datasource.users.jdbc.max-size=11 quarkus.datasource.inventory.db-kind=h2 quarkus.datasource.inventory.username=username2 quarkus.datasource.inventory.jdbc.driver=org.h2.Driver quarkus.datasource.inventory.jdbc.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.jdbc.min-size=2 quarkus.datasource.inventory.jdbc.max-size=12 diff --git a/extensions/agroal/deployment/src/test/resources/application-multiple-devservices-datasources.properties 
b/extensions/agroal/deployment/src/test/resources/application-multiple-devservices-datasources.properties new file mode 100644 index 0000000000000..3101fa09fec0c --- /dev/null +++ b/extensions/agroal/deployment/src/test/resources/application-multiple-devservices-datasources.properties @@ -0,0 +1,2 @@ +quarkus.datasource.users.devservices=true +quarkus.datasource.inventory.devservices=true diff --git a/extensions/agroal/deployment/src/test/resources/application-named-datasource.properties b/extensions/agroal/deployment/src/test/resources/application-named-datasource.properties index f0eee864fc8a6..df9feff733394 100644 --- a/extensions/agroal/deployment/src/test/resources/application-named-datasource.properties +++ b/extensions/agroal/deployment/src/test/resources/application-named-datasource.properties @@ -2,5 +2,4 @@ quarkus.datasource.testing.db-kind=h2 quarkus.datasource.testing.username=username-named quarkus.datasource.testing.jdbc.url=jdbc:h2:tcp://localhost/mem:testing quarkus.datasource.testing.jdbc.driver=org.h2.Driver -quarkus.datasource.testing.jdbc.min-size=3 quarkus.datasource.testing.jdbc.max-size=13 diff --git a/extensions/agroal/deployment/src/test/resources/application-no-suitable-driver.properties b/extensions/agroal/deployment/src/test/resources/application-no-suitable-driver.properties new file mode 100644 index 0000000000000..08161477c2aef --- /dev/null +++ b/extensions/agroal/deployment/src/test/resources/application-no-suitable-driver.properties @@ -0,0 +1,3 @@ +quarkus.datasource.db-kind=no-suitable-driver +quarkus.datasource.username=username-named +quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:testing diff --git a/extensions/agroal/deployment/src/test/resources/application-pool-interception.properties b/extensions/agroal/deployment/src/test/resources/application-pool-interception.properties index d67e193a8905d..4f4b26aead078 100644 --- a/extensions/agroal/deployment/src/test/resources/application-pool-interception.properties +++ b/extensions/agroal/deployment/src/test/resources/application-pool-interception.properties @@ -1,23 +1,20 @@ # default ds -quarkus.datasource.url=jdbc:h2:mem:db0; -quarkus.datasource.driver=org.h2.Driver +quarkus.datasource.db-kind=h2 quarkus.datasource.username=username1 -quarkus.datasource.min-size=0 -quarkus.datasource.max-size=2 -quarkus.datasource.new-connection-sql=CREATE SCHEMA IF NOT EXISTS INTERCEPTOR; +quarkus.datasource.jdbc.url=jdbc:h2:mem:db0; +quarkus.datasource.jdbc.max-size=2 +quarkus.datasource.jdbc.new-connection-sql=CREATE SCHEMA IF NOT EXISTS INTERCEPTOR; # another ds -quarkus.datasource.another.url=jdbc:h2:mem:db1 -quarkus.datasource.another.driver=org.h2.Driver +quarkus.datasource.another.db-kind=h2 quarkus.datasource.another.username=username1 -quarkus.datasource.another.min-size=0 -quarkus.datasource.another.max-size=2 -quarkus.datasource.another.new-connection-sql=CREATE SCHEMA IF NOT EXISTS LOW; CREATE SCHEMA IF NOT EXISTS PRIORITY; +quarkus.datasource.another.jdbc.url=jdbc:h2:mem:db1 +quarkus.datasource.another.jdbc.max-size=2 +quarkus.datasource.another.jdbc.new-connection-sql=CREATE SCHEMA IF NOT EXISTS LOW; CREATE SCHEMA IF NOT EXISTS PRIORITY; # a ds that does not have interception -quarkus.datasource.pure.url=jdbc:h2:mem:db2 -quarkus.datasource.pure.driver=org.h2.Driver +quarkus.datasource.pure.db-kind=h2 quarkus.datasource.pure.username=username1 -quarkus.datasource.pure.min-size=0 -quarkus.datasource.pure.max-size=2 -quarkus.datasource.pure.new-connection-sql=CREATE SCHEMA IF NOT EXISTS PURE; 
+quarkus.datasource.pure.jdbc.url=jdbc:h2:mem:db2 +quarkus.datasource.pure.jdbc.max-size=2 +quarkus.datasource.pure.jdbc.new-connection-sql=CREATE SCHEMA IF NOT EXISTS PURE; diff --git a/extensions/agroal/deployment/src/test/resources/compatible-kind-and-driver-datasource.properties b/extensions/agroal/deployment/src/test/resources/compatible-kind-and-driver-datasource.properties deleted file mode 100644 index fc9fdf9e18491..0000000000000 --- a/extensions/agroal/deployment/src/test/resources/compatible-kind-and-driver-datasource.properties +++ /dev/null @@ -1,8 +0,0 @@ -quarkus.datasource.db-kind=h2 -quarkus.datasource.driver=org.h2.Driver -quarkus.datasource.username=username-default - -quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 -quarkus.datasource.jdbc.max-size=13 -quarkus.datasource.jdbc.enable-metrics=true diff --git a/extensions/agroal/deployment/src/test/resources/incompatible-kind-and-driver-datasource.properties b/extensions/agroal/deployment/src/test/resources/incompatible-kind-and-driver-datasource.properties deleted file mode 100644 index 8344e9da33c09..0000000000000 --- a/extensions/agroal/deployment/src/test/resources/incompatible-kind-and-driver-datasource.properties +++ /dev/null @@ -1,8 +0,0 @@ -quarkus.datasource.db-kind=h2 -quarkus.datasource.driver=org.postgresql.Driver -quarkus.datasource.username=username-default - -quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 -quarkus.datasource.jdbc.max-size=13 -quarkus.datasource.jdbc.enable-metrics=true diff --git a/extensions/agroal/deployment/src/test/resources/incompatible-multiple-datasources.properties b/extensions/agroal/deployment/src/test/resources/incompatible-multiple-datasources.properties deleted file mode 100644 index ceb1dc67f953e..0000000000000 --- a/extensions/agroal/deployment/src/test/resources/incompatible-multiple-datasources.properties +++ /dev/null @@ -1,18 +0,0 @@ -quarkus.datasource.db-kind=h2 -quarkus.datasource.username=username-default -quarkus.datasource.jdbc.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.jdbc.min-size=3 -quarkus.datasource.jdbc.max-size=13 - -quarkus.datasource.users.db-kind=h2 -quarkus.datasource.users.username=username1 -quarkus.datasource.users.jdbc.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.jdbc.min-size=1 -quarkus.datasource.users.jdbc.max-size=11 - -quarkus.datasource.inventory.db-kind=h2 -quarkus.datasource.inventory.driver=org.postgresql.Driver -quarkus.datasource.inventory.username=username2 -quarkus.datasource.inventory.jdbc.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.jdbc.min-size=2 -quarkus.datasource.inventory.jdbc.max-size=12 diff --git a/extensions/agroal/deployment/src/test/resources/legacy-application-default-datasource.properties b/extensions/agroal/deployment/src/test/resources/legacy-application-default-datasource.properties deleted file mode 100644 index 74d72ce5e6dfb..0000000000000 --- a/extensions/agroal/deployment/src/test/resources/legacy-application-default-datasource.properties +++ /dev/null @@ -1,16 +0,0 @@ -#tag::basic[] -quarkus.datasource.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.driver=org.h2.Driver -quarkus.datasource.username=username-default -quarkus.datasource.min-size=3 -quarkus.datasource.max-size=13 -quarkus.datasource.enable-metrics=true -#end::basic[] -quarkus.datasource.initial-size=7 -quarkus.datasource.background-validation-interval=53 
-quarkus.datasource.acquisition-timeout=54 -quarkus.datasource.leak-detection-interval=55 -quarkus.datasource.idle-removal-interval=56 -quarkus.datasource.max-lifetime=57 -quarkus.datasource.transaction-isolation-level=serializable -quarkus.datasource.new-connection-sql=create schema if not exists schema_default \ No newline at end of file diff --git a/extensions/agroal/deployment/src/test/resources/legacy-application-multiple-datasources.properties b/extensions/agroal/deployment/src/test/resources/legacy-application-multiple-datasources.properties deleted file mode 100644 index 98fc934def1e7..0000000000000 --- a/extensions/agroal/deployment/src/test/resources/legacy-application-multiple-datasources.properties +++ /dev/null @@ -1,17 +0,0 @@ -quarkus.datasource.driver=org.h2.Driver -quarkus.datasource.url=jdbc:h2:tcp://localhost/mem:default -quarkus.datasource.username=username-default -quarkus.datasource.min-size=3 -quarkus.datasource.max-size=13 - -quarkus.datasource.users.driver=org.h2.Driver -quarkus.datasource.users.url=jdbc:h2:tcp://localhost/mem:users -quarkus.datasource.users.username=username1 -quarkus.datasource.users.min-size=1 -quarkus.datasource.users.max-size=11 - -quarkus.datasource.inventory.driver=org.h2.Driver -quarkus.datasource.inventory.url=jdbc:h2:tcp://localhost/mem:inventory -quarkus.datasource.inventory.username=username2 -quarkus.datasource.inventory.min-size=2 -quarkus.datasource.inventory.max-size=12 diff --git a/extensions/agroal/pom.xml b/extensions/agroal/pom.xml index 8a54c82f8d120..2ebcce6c78b23 100644 --- a/extensions/agroal/pom.xml +++ b/extensions/agroal/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/agroal/runtime/pom.xml b/extensions/agroal/runtime/pom.xml index 1ee243105ee45..54183eb20ebf2 100644 --- a/extensions/agroal/runtime/pom.xml +++ b/extensions/agroal/runtime/pom.xml @@ -6,7 +6,6 @@ quarkus-agroal-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 @@ -33,6 +32,7 @@ org.graalvm.nativeimage svm + provided @@ -41,11 +41,6 @@ quarkus-smallrye-health true - - io.quarkus - quarkus-smallrye-metrics - true - org.jboss.narayana.jta diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/DataSource.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/DataSource.java index 7afac46f239e7..cdf7ea6fde4e6 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/DataSource.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/DataSource.java @@ -24,7 +24,7 @@ String value(); - class DataSourceLiteral extends AnnotationLiteral implements DataSource { + public class DataSourceLiteral extends AnnotationLiteral implements DataSource { private String name; diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java index 26aa1715184f1..2b9c324b2a263 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java @@ -29,8 +29,8 @@ public class DataSourceJdbcBuildTimeConfig { public TransactionIntegration transactions = 
TransactionIntegration.ENABLED; /** - * Enable datasource metrics collection. If unspecified, collecting metrics will be enabled by default if the - * smallrye-metrics extension is active. + * Enable datasource metrics collection. If unspecified, collecting metrics will be enabled by default if + * a metrics extension is active. */ @ConfigItem public Optional<Boolean> enableMetrics = Optional.empty(); diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcRuntimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcRuntimeConfig.java index 5b9c310b44676..34c7556b662c5 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcRuntimeConfig.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcRuntimeConfig.java @@ -1,6 +1,7 @@ package io.quarkus.agroal.runtime; import java.time.Duration; +import java.util.Map; import java.util.Optional; import java.util.OptionalInt; @@ -103,4 +104,11 @@ public class DataSourceJdbcRuntimeConfig { */ @ConfigItem(defaultValue = "true") public boolean poolingEnabled = true; + + /** + * Other unspecified properties to be passed to the JDBC driver when creating new connections. + */ + @ConfigItem + public Map<String, String> additionalJdbcProperties; + } diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceSupport.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceSupport.java index 471598fa0dd15..455d53ee144a4 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceSupport.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceSupport.java @@ -23,18 +23,16 @@ public static class Entry { public String dataSourceName; public String resolvedDbKind; public String resolvedDriverClass; - public boolean isLegacy; public boolean isDefault; public Entry() { } - public Entry(String dataSourceName, String resolvedDbKind, String resolvedDriverClass, boolean isLegacy, + public Entry(String dataSourceName, String resolvedDbKind, String resolvedDriverClass, boolean isDefault) { this.dataSourceName = dataSourceName; this.resolvedDbKind = resolvedDbKind; this.resolvedDriverClass = resolvedDriverClass; - this.isLegacy = isLegacy; this.isDefault = isDefault; } } diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java index 66c86d87c01bb..411f2aed72677 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java @@ -5,6 +5,7 @@ import java.sql.Statement; import java.util.Collection; import java.util.Iterator; +import java.util.Map; import java.util.ServiceLoader; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -46,9 +47,6 @@ import io.quarkus.datasource.runtime.DataSourceRuntimeConfig; import io.quarkus.datasource.runtime.DataSourcesBuildTimeConfig; import io.quarkus.datasource.runtime.DataSourcesRuntimeConfig; -import io.quarkus.datasource.runtime.LegacyDataSourceRuntimeConfig; -import io.quarkus.datasource.runtime.LegacyDataSourcesRuntimeConfig; -import io.quarkus.runtime.configuration.ConfigurationException; /** * This class is sort of a producer for {@link AgroalDataSource}.
@@ -59,7 +57,6 @@ * {@link AgroalRecorder#agroalDataSourceSupplier(String, DataSourcesRuntimeConfig)}) * in order to produce the actual {@code AgroalDataSource} objects. */ -@SuppressWarnings("deprecation") @Singleton public class DataSources { @@ -69,9 +66,6 @@ public class DataSources { private final DataSourcesRuntimeConfig dataSourcesRuntimeConfig; private final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig; private final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig; - private final LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig; - private final LegacyDataSourcesRuntimeConfig legacyDataSourcesRuntimeConfig; - private final LegacyDataSourcesJdbcRuntimeConfig legacyDataSourcesJdbcRuntimeConfig; private final TransactionManager transactionManager; private final TransactionSynchronizationRegistry transactionSynchronizationRegistry; private final DataSourceSupport dataSourceSupport; @@ -82,18 +76,13 @@ public class DataSources { public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, DataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig, DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig, - LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig, - LegacyDataSourcesRuntimeConfig legacyDataSourcesRuntimeConfig, - LegacyDataSourcesJdbcRuntimeConfig legacyDataSourcesJdbcRuntimeConfig, TransactionManager transactionManager, + TransactionManager transactionManager, TransactionSynchronizationRegistry transactionSynchronizationRegistry, DataSourceSupport dataSourceSupport, @Any Instance agroalPoolInterceptors) { this.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig; this.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig; this.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig; this.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig; - this.legacyDataSourcesJdbcBuildTimeConfig = legacyDataSourcesJdbcBuildTimeConfig; - this.legacyDataSourcesRuntimeConfig = legacyDataSourcesRuntimeConfig; - this.legacyDataSourcesJdbcRuntimeConfig = legacyDataSourcesJdbcRuntimeConfig; this.transactionManager = transactionManager; this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; this.dataSourceSupport = dataSourceSupport; @@ -133,48 +122,19 @@ public AgroalDataSource doCreateDataSource(String dataSourceName) { DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName); DataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName); DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName); - LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcBuildTimeConfig = getLegacyDataSourceJdbcBuildTimeConfig( - dataSourceName); - LegacyDataSourceRuntimeConfig legacyDataSourceRuntimeConfig = getLegacyDataSourceRuntimeConfig(dataSourceName); - LegacyDataSourceJdbcRuntimeConfig legacyDataSourceJdbcRuntimeConfig = getLegacyDataSourceJdbcRuntimeConfig( - dataSourceName); DataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName); - boolean isLegacy = matchingSupportEntry.isLegacy; - if (!isLegacy) { - if (!dataSourceJdbcRuntimeConfig.url.isPresent()) { - String errorMessage; - if (!legacyDataSourceRuntimeConfig.url.isPresent()) { - // we don't have any URL configuration so using a standard message - if (DataSourceUtil.isDefault(dataSourceName)) { - 
errorMessage = "quarkus.datasource.jdbc.url has not been defined"; - } else { - errorMessage = "quarkus.datasource." + dataSourceName + ".jdbc.url has not been defined"; - } - } else { - // the user mixed legacy configuration and the new style, let's use an appropriate message - if (DataSourceUtil.isDefault(dataSourceName)) { - errorMessage = "Using legacy quarkus.datasource.url with a db-kind is not supported, please use " - + " quarkus.datasource.jdbc.url instead. See https://quarkus.io/guides/datasource for more information."; - } else { - errorMessage = "Using legacy quarkus.datasource." + dataSourceName - + ".url with a db-kind is not supported, please use " - + "quarkus.datasource." + dataSourceName + ".jdbc.url " - + "instead. See https://quarkus.io/guides/datasource for more information."; - } - } - throw new ConfigurationException(errorMessage); - } - } else { - if (!legacyDataSourceRuntimeConfig.url.isPresent()) { - String errorMessage; - if (DataSourceUtil.isDefault(dataSourceName)) { - errorMessage = "quarkus.datasource.url has not been defined"; - } else { - errorMessage = "quarkus.datasource." + dataSourceName + ".url has not been defined"; - } - throw new ConfigurationException(errorMessage); + if (!dataSourceJdbcRuntimeConfig.url.isPresent()) { + String errorMessage; + // we don't have any URL configuration so using a standard message + if (DataSourceUtil.isDefault(dataSourceName)) { + errorMessage = "quarkus.datasource.jdbc.url has not been defined"; + } else { + errorMessage = "quarkus.datasource." + dataSourceName + ".jdbc.url has not been defined"; } + //this is not an error situation, because we want to allow the situation where a JDBC extension + //is installed but has not been configured + return new UnconfiguredDataSource(errorMessage); } // we first make sure that all available JDBC drivers are loaded in the current TCCL @@ -206,14 +166,8 @@ public AgroalDataSource doCreateDataSource(String dataSourceName) { .connectionFactoryConfiguration(); boolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent; - if (!isLegacy) { - applyNewConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, - dataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, mpMetricsPresent); - } else { - applyLegacyConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, - dataSourceRuntimeConfig, legacyDataSourceJdbcBuildTimeConfig, legacyDataSourceRuntimeConfig, - legacyDataSourceJdbcRuntimeConfig, mpMetricsPresent); - } + applyNewConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, + dataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, mpMetricsPresent); if (dataSourceSupport.disableSslSupport) { if (agroalConnectionConfigurerHandle.isAvailable()) { @@ -298,6 +252,11 @@ private void applyNewConfiguration(AgroalDataSourceConfigurationSupplier dataSou .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider)); } + // Extra JDBC properties + for (Map.Entry entry : dataSourceJdbcRuntimeConfig.additionalJdbcProperties.entrySet()) { + connectionFactoryConfiguration.jdbcProperty(entry.getKey(), entry.getValue()); + } + // Pool size configuration: poolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize); poolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize); @@ -340,105 +299,6 @@ public boolean isValid(Connection connection) { } } - private void 
applyLegacyConfiguration(AgroalDataSourceConfigurationSupplier dataSourceConfiguration, - AgroalConnectionPoolConfigurationSupplier poolConfiguration, - AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class driver, - DataSourceRuntimeConfig dataSourceRuntimeConfig, - LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcBuildTimeConfig, - LegacyDataSourceRuntimeConfig legacyDataSourceRuntimeConfig, - LegacyDataSourceJdbcRuntimeConfig legacyDataSourceJdbcRuntimeConfig, boolean mpMetricsPresent) { - connectionFactoryConfiguration.jdbcUrl(legacyDataSourceRuntimeConfig.url.get()); - connectionFactoryConfiguration.connectionProviderClass(driver); - connectionFactoryConfiguration.trackJdbcResources(legacyDataSourceJdbcRuntimeConfig.detectStatementLeaks); - - if (legacyDataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) { - connectionFactoryConfiguration - .jdbcTransactionIsolation( - legacyDataSourceJdbcRuntimeConfig.transactionIsolationLevel.get()); - } - - if (legacyDataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) { - TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, - transactionSynchronizationRegistry); - poolConfiguration.transactionIntegration(txIntegration); - } - - // New connection SQL - if (legacyDataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) { - connectionFactoryConfiguration.initialSql(legacyDataSourceJdbcRuntimeConfig.newConnectionSql.get()); - } - - // metrics - if (legacyDataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) { - dataSourceConfiguration.metricsEnabled(legacyDataSourceJdbcBuildTimeConfig.enableMetrics.get()); - } else { - // if the enable-metrics property is unspecified, treat it as true if MP Metrics are being exposed - dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent); - } - - // Authentication - if (dataSourceRuntimeConfig.username.isPresent()) { - connectionFactoryConfiguration - .principal(new NamePrincipal(dataSourceRuntimeConfig.username.get())); - } - if (dataSourceRuntimeConfig.password.isPresent()) { - connectionFactoryConfiguration - .credential(new SimplePassword(dataSourceRuntimeConfig.password.get())); - } - - // credentials provider - if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) { - String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null); - CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName); - - String name = dataSourceRuntimeConfig.credentialsProvider.get(); - connectionFactoryConfiguration - .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider)); - } - - // Pool size configuration: - poolConfiguration.minSize(legacyDataSourceJdbcRuntimeConfig.minSize); - poolConfiguration.maxSize(legacyDataSourceRuntimeConfig.maxSize); - if (legacyDataSourceJdbcRuntimeConfig.initialSize.isPresent() - && legacyDataSourceJdbcRuntimeConfig.initialSize.get() > 0) { - poolConfiguration.initialSize(legacyDataSourceJdbcRuntimeConfig.initialSize.get()); - } - - // Connection management - poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator()); - if (legacyDataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) { - poolConfiguration.acquisitionTimeout(legacyDataSourceJdbcRuntimeConfig.acquisitionTimeout.get()); - } - if (legacyDataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) { - 
poolConfiguration.validationTimeout(legacyDataSourceJdbcRuntimeConfig.backgroundValidationInterval.get()); - } - if (legacyDataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) { - String validationQuery = legacyDataSourceJdbcRuntimeConfig.validationQuerySql.get(); - poolConfiguration.connectionValidator(new ConnectionValidator() { - - @Override - public boolean isValid(Connection connection) { - try (Statement stmt = connection.createStatement()) { - stmt.execute(validationQuery); - return true; - } catch (Exception e) { - log.warn("Connection validation failed", e); - } - return false; - } - }); - } - if (legacyDataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) { - poolConfiguration.reapTimeout(legacyDataSourceJdbcRuntimeConfig.idleRemovalInterval.get()); - } - if (legacyDataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) { - poolConfiguration.leakTimeout(legacyDataSourceJdbcRuntimeConfig.leakDetectionInterval.get()); - } - if (legacyDataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) { - poolConfiguration.maxLifetime(legacyDataSourceJdbcRuntimeConfig.maxLifetime.get()); - } - } - public DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesBuildTimeConfig.defaultDataSource; @@ -481,38 +341,6 @@ public DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSou return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcRuntimeConfig(); } - public LegacyDataSourceJdbcBuildTimeConfig getLegacyDataSourceJdbcBuildTimeConfig(String dataSourceName) { - if (DataSourceUtil.isDefault(dataSourceName)) { - return legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource; - } - - LegacyDataSourceJdbcBuildTimeConfig namedConfig = legacyDataSourcesJdbcBuildTimeConfig.namedDataSources - .get(dataSourceName); - - return namedConfig != null ? namedConfig : new LegacyDataSourceJdbcBuildTimeConfig(); - } - - public LegacyDataSourceRuntimeConfig getLegacyDataSourceRuntimeConfig(String dataSourceName) { - if (DataSourceUtil.isDefault(dataSourceName)) { - return legacyDataSourcesRuntimeConfig.defaultDataSource; - } - - LegacyDataSourceRuntimeConfig namedConfig = legacyDataSourcesRuntimeConfig.namedDataSources.get(dataSourceName); - - return namedConfig != null ? namedConfig : new LegacyDataSourceRuntimeConfig(); - } - - public LegacyDataSourceJdbcRuntimeConfig getLegacyDataSourceJdbcRuntimeConfig(String dataSourceName) { - if (DataSourceUtil.isDefault(dataSourceName)) { - return legacyDataSourcesJdbcRuntimeConfig.defaultDataSource; - } - - LegacyDataSourceJdbcRuntimeConfig namedConfig = legacyDataSourcesJdbcRuntimeConfig.namedDataSources - .get(dataSourceName); - - return namedConfig != null ? 
namedConfig : new LegacyDataSourceJdbcRuntimeConfig(); - } - /** * Uses the {@link ServiceLoader#load(Class) ServiceLoader to load the JDBC drivers} in context * of the current {@link Thread#getContextClassLoader() TCCL} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourceJdbcBuildTimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourceJdbcBuildTimeConfig.java deleted file mode 100644 index 3421431d17b6f..0000000000000 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourceJdbcBuildTimeConfig.java +++ /dev/null @@ -1,36 +0,0 @@ -package io.quarkus.agroal.runtime; - -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -/** - * This configuration class is here for compatibility reason and is planned for removal. - */ -@Deprecated -@ConfigGroup -public class LegacyDataSourceJdbcBuildTimeConfig { - - /** - * @deprecated use quarkus.datasource.db-kind (and quarkus.datasource.jdbc.driver if you really need a specific JDBC - * driver). - */ - @ConfigItem - @Deprecated - public Optional driver; - - /** - * @deprecated use quarkus.datasource.jdbc.transactions instead. - */ - @ConfigItem(defaultValue = "enabled") - @Deprecated - public TransactionIntegration transactions = TransactionIntegration.ENABLED; - - /** - * @deprecated use quarkus.datasource.jdbc.enable-metrics instead. - */ - @ConfigItem - @Deprecated - public Optional enableMetrics = Optional.empty(); -} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourceJdbcRuntimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourceJdbcRuntimeConfig.java deleted file mode 100644 index d402f97cf317e..0000000000000 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourceJdbcRuntimeConfig.java +++ /dev/null @@ -1,93 +0,0 @@ -package io.quarkus.agroal.runtime; - -import java.time.Duration; -import java.util.Optional; - -import io.agroal.api.configuration.AgroalConnectionFactoryConfiguration; -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -/** - * This configuration class is here for compatibility reason and is planned for removal. - */ -@Deprecated -@ConfigGroup -public class LegacyDataSourceJdbcRuntimeConfig { - - /** - * @deprecated use quarkus.datasource.jdbc.initial-size instead. - */ - @ConfigItem - @Deprecated - public Optional initialSize = Optional.empty(); - - /** - * @deprecated use quarkus.datasource.jdbc.min-size instead. - */ - @ConfigItem - @Deprecated - public int minSize = 0; - - /** - * @deprecated use quarkus.datasource.jdbc.background-validation-interval instead. - */ - @ConfigItem(defaultValue = "2M") - @Deprecated - public Optional backgroundValidationInterval = Optional.of(Duration.ofMinutes(2)); - - /** - * @deprecated use quarkus.datasource.jdbc.acquisition-timeout instead. - */ - @ConfigItem(defaultValue = "5") - @Deprecated - public Optional acquisitionTimeout = Optional.of(Duration.ofSeconds(5)); - - /** - * @deprecated use quarkus.datasource.jdbc.leak-detection-interval instead. - */ - @ConfigItem - @Deprecated - public Optional leakDetectionInterval = Optional.empty(); - - /** - * @deprecated use quarkus.datasource.jdbc.idle-removal-interval instead. 
- */ - @ConfigItem(defaultValue = "5M") - @Deprecated - public Optional idleRemovalInterval = Optional.of(Duration.ofMinutes(5)); - - /** - * @deprecated use quarkus.datasource.jdbc.max-lifetime instead. - */ - @ConfigItem - @Deprecated - public Optional maxLifetime = Optional.empty(); - - /** - * @deprecated use quarkus.datasource.jdbc.transaction-isolation-level instead. - */ - @ConfigItem - @Deprecated - public Optional transactionIsolationLevel = Optional.empty(); - - /** - * @deprecated use quarkus.datasource.jdbc.detect-statement-leaks instead. - */ - @ConfigItem(defaultValue = "true") - @Deprecated - public boolean detectStatementLeaks = true; - - /** - * @deprecated use quarkus.datasource.jdbc.new-connection-sql instead. - */ - @ConfigItem - @Deprecated - public Optional newConnectionSql = Optional.empty(); - - /** - * @deprecated use quarkus.datasource.jdbc.validation-query-sql instead. - */ - @ConfigItem - @Deprecated - public Optional validationQuerySql = Optional.empty(); -} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourcesJdbcBuildTimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourcesJdbcBuildTimeConfig.java deleted file mode 100644 index ba67f54c2b5ac..0000000000000 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourcesJdbcBuildTimeConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -package io.quarkus.agroal.runtime; - -import java.util.Map; - -import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConfigPhase; -import io.quarkus.runtime.annotations.ConfigRoot; - -/** - * This configuration class is here for compatibility reason and is planned for removal. - */ -@Deprecated -@ConfigRoot(name = "datasource", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public class LegacyDataSourcesJdbcBuildTimeConfig { - - /** - * The default datasource. - */ - @ConfigItem(name = ConfigItem.PARENT) - public LegacyDataSourceJdbcBuildTimeConfig defaultDataSource; - - /** - * Additional named datasources. - */ - @ConfigDocSection - @ConfigDocMapKey("datasource-name") - @ConfigItem(name = ConfigItem.PARENT) - public Map namedDataSources; -} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourcesJdbcRuntimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourcesJdbcRuntimeConfig.java deleted file mode 100644 index b32b7394f9f6a..0000000000000 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/LegacyDataSourcesJdbcRuntimeConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -package io.quarkus.agroal.runtime; - -import java.util.Map; - -import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConfigPhase; -import io.quarkus.runtime.annotations.ConfigRoot; - -/** - * This configuration class is here for compatibility reason and is planned for removal. - */ -@Deprecated -@ConfigRoot(name = "datasource", phase = ConfigPhase.RUN_TIME) -public class LegacyDataSourcesJdbcRuntimeConfig { - - /** - * The default datasource. - */ - @ConfigItem(name = ConfigItem.PARENT) - public LegacyDataSourceJdbcRuntimeConfig defaultDataSource; - - /** - * Additional named datasources. 
- */ - @ConfigDocSection - @ConfigDocMapKey("datasource-name") - @ConfigItem(name = ConfigItem.PARENT) - public Map<String, LegacyDataSourceJdbcRuntimeConfig> namedDataSources; -} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/UnconfiguredDataSource.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/UnconfiguredDataSource.java new file mode 100644 index 0000000000000..0e30f2a092dff --- /dev/null +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/UnconfiguredDataSource.java @@ -0,0 +1,104 @@ +package io.quarkus.agroal.runtime; + +import java.io.PrintWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.logging.Logger; + +import io.agroal.api.AgroalDataSource; +import io.agroal.api.AgroalDataSourceMetrics; +import io.agroal.api.AgroalPoolInterceptor; +import io.agroal.api.configuration.AgroalDataSourceConfiguration; +import io.quarkus.runtime.configuration.ConfigurationException; + +public class UnconfiguredDataSource implements AgroalDataSource { + + private final String errorMessage; + + public UnconfiguredDataSource(String errorMessage) { + this.errorMessage = errorMessage; + } + + public void throwException() { + throw new ConfigurationException(errorMessage); + } + + @Override + public AgroalDataSourceConfiguration getConfiguration() { + throw new ConfigurationException(errorMessage); + } + + @Override + public AgroalDataSourceMetrics getMetrics() { + throw new ConfigurationException(errorMessage); + } + + @Override + public void flush(FlushMode mode) { + throw new ConfigurationException(errorMessage); + } + + @Override + public void setPoolInterceptors(Collection<? extends AgroalPoolInterceptor> interceptors) { + //noop + } + + @Override + public List<AgroalPoolInterceptor> getPoolInterceptors() { + return Collections.emptyList(); + } + + @Override + public void close() { + + } + + @Override + public Connection getConnection() throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public Connection getConnection(String username, String password) throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public PrintWriter getLogWriter() throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public void setLogWriter(PrintWriter out) throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public void setLoginTimeout(int seconds) throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public int getLoginTimeout() throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public Logger getParentLogger() throws SQLFeatureNotSupportedException { + throw new ConfigurationException(errorMessage); + } + + @Override + public <T> T unwrap(Class<T> iface) throws SQLException { + throw new ConfigurationException(errorMessage); + } + + @Override + public boolean isWrapperFor(Class<?> iface) throws SQLException { + return false; + } +} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalCounter.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalCounter.java deleted file mode 100644 index 4fb7605927e6d..0000000000000 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalCounter.java +++ /dev/null @@ -1,100 +0,0 @@ -package
io.quarkus.agroal.runtime.metrics; - -import org.eclipse.microprofile.metrics.Counter; - -import io.agroal.api.AgroalDataSource; -import io.agroal.api.AgroalDataSourceMetrics; -import io.quarkus.agroal.DataSource.DataSourceLiteral; -import io.quarkus.arc.Arc; -import io.quarkus.datasource.common.runtime.DataSourceUtil; - -public class AgroalCounter implements Counter { - - private String dataSourceName; - private volatile AgroalDataSource dataSource; - private String metric; - - public AgroalCounter() { - - } - - /** - * @param dataSourceName Which datasource should be queried for metric - * @param metricName Name of the method from DataSource.getMetrics() that should be called to retrieve the particular value. - * This has nothing to do with the metric name from MP Metrics point of view! - */ - public AgroalCounter(String dataSourceName, String metricName) { - this.dataSourceName = dataSourceName; - this.metric = metricName; - } - - public String getDataSourceName() { - return dataSourceName; - } - - public void setDataSourceName(String dataSourceName) { - this.dataSourceName = dataSourceName; - } - - private AgroalDataSource getDataSource() { - AgroalDataSource dsLocal = dataSource; - if (dsLocal == null) { - synchronized (this) { - dsLocal = dataSource; - if (dsLocal == null) { - if (dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)) { - dataSource = dsLocal = Arc.container().instance(AgroalDataSource.class).get(); - } else { - dataSource = dsLocal = Arc.container() - .instance(AgroalDataSource.class, new DataSourceLiteral(dataSourceName)) - .get(); - } - } - } - } - return dsLocal; - } - - public void setDataSource(AgroalDataSource dataSource) { - this.dataSource = dataSource; - } - - public String getMetric() { - return metric; - } - - public void setMetric(String metric) { - this.metric = metric; - } - - @Override - public void inc() { - } - - @Override - public void inc(long n) { - } - - @Override - public long getCount() { - AgroalDataSourceMetrics metrics = getDataSource().getMetrics(); - switch (metric) { - case "acquireCount": - return metrics.acquireCount(); - case "creationCount": - return metrics.creationCount(); - case "leakDetectionCount": - return metrics.leakDetectionCount(); - case "destroyCount": - return metrics.destroyCount(); - case "flushCount": - return metrics.flushCount(); - case "invalidCount": - return metrics.invalidCount(); - case "reapCount": - return metrics.reapCount(); - default: - throw new IllegalArgumentException("Unknown datasource metric"); - } - } -} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalGauge.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalGauge.java deleted file mode 100644 index 22c8654923de4..0000000000000 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalGauge.java +++ /dev/null @@ -1,94 +0,0 @@ -package io.quarkus.agroal.runtime.metrics; - -import org.eclipse.microprofile.metrics.Gauge; - -import io.agroal.api.AgroalDataSource; -import io.agroal.api.AgroalDataSourceMetrics; -import io.quarkus.agroal.DataSource.DataSourceLiteral; -import io.quarkus.arc.Arc; -import io.quarkus.datasource.common.runtime.DataSourceUtil; - -public class AgroalGauge implements Gauge { - - private String dataSourceName; - private volatile AgroalDataSource dataSource; - private String metric; - - public AgroalGauge() { - - } - - /** - * @param dataSourceName Which datasource should be queried for metric - * @param 
metricName Name of the method from DataSource.getMetrics() that should be called to retrieve the particular value. - * This has nothing to do with the metric name from MP Metrics point of view! - */ - public AgroalGauge(String dataSourceName, String metricName) { - this.dataSourceName = dataSourceName; - this.metric = metricName; - } - - public String getDataSourceName() { - return dataSourceName; - } - - public void setDataSourceName(String dataSourceName) { - this.dataSourceName = dataSourceName; - } - - public String getMetric() { - return metric; - } - - public void setMetric(String metric) { - this.metric = metric; - } - - private AgroalDataSource getDataSource() { - AgroalDataSource dsLocal = dataSource; - if (dsLocal == null) { - synchronized (this) { - dsLocal = dataSource; - if (dsLocal == null) { - if (dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)) { - dataSource = dsLocal = Arc.container().instance(AgroalDataSource.class).get(); - } else { - dataSource = dsLocal = Arc.container() - .instance(AgroalDataSource.class, new DataSourceLiteral(dataSourceName)) - .get(); - } - } - } - } - return dsLocal; - } - - @Override - public Long getValue() { - AgroalDataSourceMetrics metrics = getDataSource().getMetrics(); - switch (metric) { - case "activeCount": - return metrics.activeCount(); - case "availableCount": - return metrics.availableCount(); - case "maxUsedCount": - return metrics.maxUsedCount(); - case "awaitingCount": - return metrics.awaitingCount(); - case "blockingTimeAverage": - return metrics.blockingTimeAverage().toMillis(); - case "blockingTimeMax": - return metrics.blockingTimeMax().toMillis(); - case "blockingTimeTotal": - return metrics.blockingTimeTotal().toMillis(); - case "creationTimeAverage": - return metrics.creationTimeAverage().toMillis(); - case "creationTimeMax": - return metrics.creationTimeMax().toMillis(); - case "creationTimeTotal": - return metrics.creationTimeTotal().toMillis(); - default: - throw new IllegalArgumentException("Unknown data source metric"); - } - } -} diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalMetricsRecorder.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalMetricsRecorder.java new file mode 100644 index 0000000000000..081ef0c0b6d95 --- /dev/null +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/metrics/AgroalMetricsRecorder.java @@ -0,0 +1,127 @@ +package io.quarkus.agroal.runtime.metrics; + +import java.time.Duration; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import io.agroal.api.AgroalDataSource; +import io.agroal.api.AgroalDataSourceMetrics; +import io.quarkus.agroal.DataSource; +import io.quarkus.arc.Arc; +import io.quarkus.datasource.common.runtime.DataSourceUtil; +import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.runtime.metrics.MetricsFactory; + +/** + * Subclasses are created and registered as {literal @}Dependent beans for each + * datasource that has metrics enabled. 
+ */ +@Recorder +public class AgroalMetricsRecorder { + static Function<Supplier<Duration>, Long> convertToMillis = new Function<Supplier<Duration>, Long>() { + @Override + public Long apply(Supplier<Duration> durationSupplier) { + return durationSupplier.get().toMillis(); + } + }; + + /* RUNTIME_INIT */ + public Consumer<MetricsFactory> registerDataSourceMetrics(String dataSourceName) { + return new Consumer<MetricsFactory>() { + @Override + public void accept(MetricsFactory metricsFactory) { + String tagValue = DataSourceUtil.isDefault(dataSourceName) ? "default" : dataSourceName; + AgroalDataSourceMetrics metrics = getDataSource(dataSourceName).getMetrics(); + + metricsFactory.builder("agroal.active.count") + .description( + "Number of active connections. These connections are in use and not available to be acquired.") + .tag("datasource", tagValue) + .buildGauge(metrics::activeCount); + metricsFactory.builder("agroal.available.count") + .description("Number of idle connections in the pool, available to be acquired.") + .tag("datasource", tagValue) + .buildGauge(metrics::availableCount); + metricsFactory.builder("agroal.max.used.count") + .description("Maximum number of connections active simultaneously.") + .tag("datasource", tagValue) + .buildGauge(metrics::maxUsedCount); + metricsFactory.builder("agroal.awaiting.count") + .description("Approximate number of threads blocked, waiting to acquire a connection.") + .tag("datasource", tagValue) + .buildGauge(metrics::awaitingCount); + + metricsFactory.builder("agroal.acquire.count") + .description("Number of times an acquire operation succeeded.") + .tag("datasource", tagValue) + .buildCounter(metrics::acquireCount); + metricsFactory.builder("agroal.creation.count") + .description("Number of created connections.") + .tag("datasource", tagValue) + .buildCounter(metrics::creationCount); + metricsFactory.builder("agroal.leak.detection.count") + .description("Number of times a leak was detected.
A single connection can be detected multiple times.") + .tag("datasource", tagValue) + .buildCounter(metrics::leakDetectionCount); + metricsFactory.builder("agroal.destroy.count") + .description("Number of destroyed connections.") + .tag("datasource", tagValue) + .buildCounter(metrics::destroyCount); + metricsFactory.builder("agroal.flush.count") + .description("Number of connections removed from the pool, not counting invalid / idle.") + .tag("datasource", tagValue) + .buildCounter(metrics::flushCount); + metricsFactory.builder("agroal.invalid.count") + .description("Number of connections removed from the pool for being invalid.") + .tag("datasource", tagValue) + .buildCounter(metrics::invalidCount); + metricsFactory.builder("agroal.reap.count") + .description("Number of connections removed from the pool for being idle.") + .tag("datasource", tagValue) + .buildCounter(metrics::reapCount); + + metricsFactory.builder("agroal.blocking.time.average") + .description("Average time an application waited to acquire a connection.") + .tag("datasource", tagValue) + .unit("milliseconds") + .buildGauge(metrics::blockingTimeAverage, convertToMillis); + metricsFactory.builder("agroal.blocking.time.max") + .description("Maximum time an application waited to acquire a connection.") + .tag("datasource", tagValue) + .unit("milliseconds") + .buildGauge(metrics::blockingTimeMax, convertToMillis); + metricsFactory.builder("agroal.blocking.time.total") + .description("Total time applications waited to acquire a connection.") + .tag("datasource", tagValue) + .unit("milliseconds") + .buildGauge(metrics::blockingTimeTotal, convertToMillis); + metricsFactory.builder("agroal.creation.time.average") + .description("Average time for a connection to be created.") + .tag("datasource", tagValue) + .unit("milliseconds") + .buildGauge(metrics::creationTimeAverage, convertToMillis); + metricsFactory.builder("agroal.creation.time.max") + .description("Maximum time for a connection to be created.") + .tag("datasource", tagValue) + .unit("milliseconds") + .buildGauge(metrics::creationTimeMax, convertToMillis); + metricsFactory.builder("agroal.creation.time.total") + .description("Total time waiting for connections to be created.") + .tag("datasource", tagValue) + .unit("milliseconds") + .buildGauge(metrics::creationTimeTotal, convertToMillis); + } + }; + } + + private AgroalDataSource getDataSource(String dataSourceName) { + if (dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)) { + return Arc.container().instance(AgroalDataSource.class).get(); + } else { + return Arc.container() + .instance(AgroalDataSource.class, new DataSource.DataSourceLiteral(dataSourceName)) + .get(); + } + } +} diff --git a/extensions/agroal/spi/pom.xml b/extensions/agroal/spi/pom.xml index b3805de67593d..dc8fe8e1ef587 100644 --- a/extensions/agroal/spi/pom.xml +++ b/extensions/agroal/spi/pom.xml @@ -6,7 +6,6 @@ quarkus-agroal-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 diff --git a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDataSourceBuildItem.java b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDataSourceBuildItem.java similarity index 95% rename from extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDataSourceBuildItem.java rename to extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDataSourceBuildItem.java index 82806454396e0..f584fb0ebea2b 100644 --- a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDataSourceBuildItem.java +++
b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDataSourceBuildItem.java @@ -1,4 +1,4 @@ -package io.quarkus.agroal.deployment; +package io.quarkus.agroal.spi; import io.quarkus.builder.item.MultiBuildItem; diff --git a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDataSourceSchemaReadyBuildItem.java b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDataSourceSchemaReadyBuildItem.java similarity index 94% rename from extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDataSourceSchemaReadyBuildItem.java rename to extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDataSourceSchemaReadyBuildItem.java index 280ab686ba38f..687ed1ba40362 100644 --- a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDataSourceSchemaReadyBuildItem.java +++ b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDataSourceSchemaReadyBuildItem.java @@ -1,4 +1,4 @@ -package io.quarkus.agroal.deployment; +package io.quarkus.agroal.spi; import java.util.Collection; diff --git a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDriverBuildItem.java b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDriverBuildItem.java similarity index 96% rename from extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDriverBuildItem.java rename to extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDriverBuildItem.java index 00dd028786c04..bd8849bb2f8d6 100644 --- a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/deployment/JdbcDriverBuildItem.java +++ b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/JdbcDriverBuildItem.java @@ -1,4 +1,4 @@ -package io.quarkus.agroal.deployment; +package io.quarkus.agroal.spi; import java.util.Optional; diff --git a/extensions/amazon-alexa/deployment/pom.xml b/extensions/amazon-alexa/deployment/pom.xml index 4f526c040370b..435fe773c1a83 100644 --- a/extensions/amazon-alexa/deployment/pom.xml +++ b/extensions/amazon-alexa/deployment/pom.xml @@ -15,10 +15,6 @@ Quarkus - Amazon Alexa - Deployment - - io.quarkus - quarkus-core-deployment - io.quarkus quarkus-amazon-alexa @@ -26,7 +22,7 @@ io.quarkus - quarkus-jackson + quarkus-jackson-deployment diff --git a/extensions/amazon-alexa/pom.xml b/extensions/amazon-alexa/pom.xml index c7b0b8764b9f1..b9d123c8d7066 100644 --- a/extensions/amazon-alexa/pom.xml +++ b/extensions/amazon-alexa/pom.xml @@ -5,10 +5,10 @@ 4.0.0 - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml quarkus-amazon-alexa-parent diff --git a/extensions/amazon-alexa/runtime/pom.xml b/extensions/amazon-alexa/runtime/pom.xml index 8053553d747a8..fb60b57c1bf2c 100644 --- a/extensions/amazon-alexa/runtime/pom.xml +++ b/extensions/amazon-alexa/runtime/pom.xml @@ -58,7 +58,11 @@ org.jboss.slf4j - slf4j-jboss-logging + slf4j-jboss-logmanager + + + io.quarkus + quarkus-jackson diff --git a/extensions/amazon-lambda-http/deployment/pom.xml b/extensions/amazon-lambda-http/deployment/pom.xml index 56903db647fde..be40bb1244994 100644 --- a/extensions/amazon-lambda-http/deployment/pom.xml +++ b/extensions/amazon-lambda-http/deployment/pom.xml @@ -31,10 +31,6 @@ io.quarkus quarkus-amazon-lambda-http - - org.graalvm.nativeimage - svm - diff --git a/extensions/amazon-lambda-http/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java 
b/extensions/amazon-lambda-http/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java index 14280d0cf7c62..f4cbe7755bf34 100644 --- a/extensions/amazon-lambda-http/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java +++ b/extensions/amazon-lambda-http/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java @@ -2,17 +2,12 @@ import org.jboss.logging.Logger; +import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPEvent; +import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPResponse; + import io.quarkus.amazon.lambda.deployment.LambdaUtil; import io.quarkus.amazon.lambda.deployment.ProvidedAmazonLambdaHandlerBuildItem; import io.quarkus.amazon.lambda.http.LambdaHttpHandler; -import io.quarkus.amazon.lambda.http.model.AlbContext; -import io.quarkus.amazon.lambda.http.model.ApiGatewayAuthorizerContext; -import io.quarkus.amazon.lambda.http.model.ApiGatewayRequestIdentity; -import io.quarkus.amazon.lambda.http.model.AwsProxyRequest; -import io.quarkus.amazon.lambda.http.model.AwsProxyRequestContext; -import io.quarkus.amazon.lambda.http.model.AwsProxyResponse; -import io.quarkus.amazon.lambda.http.model.CognitoAuthorizerClaims; -import io.quarkus.amazon.lambda.http.model.ErrorModel; import io.quarkus.amazon.lambda.http.model.Headers; import io.quarkus.amazon.lambda.http.model.MultiValuedTreeMap; import io.quarkus.deployment.annotations.BuildProducer; @@ -43,16 +38,14 @@ public ProvidedAmazonLambdaHandlerBuildItem setHandler() { public void registerReflectionClasses(BuildProducer reflectiveClassBuildItemBuildProducer) { reflectiveClassBuildItemBuildProducer .produce(new ReflectiveClassBuildItem(true, true, true, - AlbContext.class, - ApiGatewayAuthorizerContext.class, - ApiGatewayRequestIdentity.class, - AwsProxyRequest.class, - AwsProxyRequestContext.class, - AwsProxyResponse.class, - CognitoAuthorizerClaims.class, - ErrorModel.class, - Headers.class, - MultiValuedTreeMap.class)); + APIGatewayV2HTTPEvent.class, + APIGatewayV2HTTPEvent.RequestContext.class, + APIGatewayV2HTTPEvent.RequestContext.Http.class, + APIGatewayV2HTTPEvent.RequestContext.Authorizer.class, + APIGatewayV2HTTPEvent.RequestContext.CognitoIdentity.class, + APIGatewayV2HTTPEvent.RequestContext.IAM.class, + APIGatewayV2HTTPEvent.RequestContext.Authorizer.JWT.class, + APIGatewayV2HTTPResponse.class, Headers.class, MultiValuedTreeMap.class)); } /** diff --git a/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.jvm.yaml b/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.jvm.yaml index a2476a2a1a648..0d41bbbf07590 100644 --- a/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.jvm.yaml +++ b/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.jvm.yaml @@ -18,15 +18,12 @@ Policies: AWSLambdaBasicExecutionRole Timeout: 15 Events: - GetResource: - Type: Api - Properties: - Path: /{proxy+} - Method: any + HttpApiEvent: + Type: HttpApi Outputs: ${lambdaName}Api: Description: URL for application - Value: !Sub 'https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/' + Value: !Sub 'https://${ServerlessHttpApi}.execute-api.${AWS::Region}.amazonaws.com/' Export: Name: ${lambdaName}Api diff --git a/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.native.yaml b/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.native.yaml index 80d6075f0a3b3..88ab665088ee6 
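/*
 * Aside on the ReflectiveClassBuildItem registration above: it is the build-step counterpart
 * of what application code could declare with @RegisterForReflection, needed because the
 * API Gateway v2 payload classes are (de)serialized reflectively by Jackson in native images.
 * A minimal sketch, assuming only the two top-level event classes; the class name is made up.
 */
import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPEvent;
import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPResponse;

import io.quarkus.runtime.annotations.RegisterForReflection;

@RegisterForReflection(targets = { APIGatewayV2HTTPEvent.class, APIGatewayV2HTTPResponse.class })
class ApiGatewayV2ReflectionHints {
}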
100644 --- a/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.native.yaml +++ b/extensions/amazon-lambda-http/deployment/src/main/resources/http/sam.native.yaml @@ -21,15 +21,12 @@ Variables: DISABLE_SIGNAL_HANDLERS: true Events: - GetResource: - Type: Api - Properties: - Path: /{proxy+} - Method: any + HttpApiEvent: + Type: HttpApi Outputs: ${lambdaName}NativeApi: Description: URL for application - Value: !Sub 'https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/' + Value: !Sub 'https://${ServerlessHttpApi}.execute-api.${AWS::Region}.amazonaws.com/' Export: Name: ${lambdaName}NativeApi diff --git a/extensions/amazon-lambda-http/maven-archetype/pom.xml b/extensions/amazon-lambda-http/maven-archetype/pom.xml index f3924edd417c4..dce1a1e348f69 100644 --- a/extensions/amazon-lambda-http/maven-archetype/pom.xml +++ b/extensions/amazon-lambda-http/maven-archetype/pom.xml @@ -6,7 +6,6 @@ quarkus-amazon-lambda-http-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 diff --git a/extensions/amazon-lambda-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml b/extensions/amazon-lambda-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml index 2fa0f4f537844..b1292683a4bb4 100644 --- a/extensions/amazon-lambda-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml +++ b/extensions/amazon-lambda-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml @@ -9,7 +9,7 @@ to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an diff --git a/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/pom.xml b/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/pom.xml index 24ece3b6b497b..528362e2869d6 100644 --- a/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/pom.xml +++ b/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/pom.xml @@ -73,9 +73,6 @@ io.quarkus quarkus-maven-plugin \${quarkus-plugin.version} - - true - @@ -108,47 +105,9 @@ native - - - - io.quarkus - quarkus-maven-plugin - \${quarkus-plugin.version} - - - - native-image - - - true - - - - - - org.apache.maven.plugins - maven-assembly-plugin - \${assembly-plugin.version} - - - zip-assembly - package - - single - - - function - - src${file.separator}assembly${file.separator}zip.xml - - false - false - - - - - - + + native +
    diff --git a/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/src/assembly/zip.xml b/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/src/assembly/zip.xml deleted file mode 100644 index 54d667c9b7dc9..0000000000000 --- a/extensions/amazon-lambda-http/maven-archetype/src/main/resources/archetype-resources/src/assembly/zip.xml +++ /dev/null @@ -1,17 +0,0 @@ - - lambda-package - - zip - - false - - - ${project.build.directory}${file.separator}${artifactId}-${version}-runner - / - bootstrap - 755 - - - diff --git a/extensions/amazon-lambda-http/pom.xml b/extensions/amazon-lambda-http/pom.xml index a3f26fdc8d883..61c49706a6b97 100644 --- a/extensions/amazon-lambda-http/pom.xml +++ b/extensions/amazon-lambda-http/pom.xml @@ -5,10 +5,10 @@ 4.0.0 - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml quarkus-amazon-lambda-http-parent diff --git a/extensions/amazon-lambda-http/runtime/pom.xml b/extensions/amazon-lambda-http/runtime/pom.xml index d85203509b16e..032f4a25bc399 100644 --- a/extensions/amazon-lambda-http/runtime/pom.xml +++ b/extensions/amazon-lambda-http/runtime/pom.xml @@ -29,12 +29,13 @@ quarkus-core - com.amazonaws.serverless - aws-serverless-java-container-core + com.amazonaws + aws-lambda-java-core org.graalvm.nativeimage svm + provided diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java b/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java index 539a94f9f2b71..fa99269e88ee1 100644 --- a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java +++ b/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java @@ -2,20 +2,21 @@ import java.io.ByteArrayOutputStream; import java.net.InetSocketAddress; -import java.net.URLEncoder; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; import java.nio.charset.StandardCharsets; import java.util.Base64; -import java.util.List; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import java.util.concurrent.CompletableFuture; import org.jboss.logging.Logger; -import com.amazonaws.serverless.proxy.internal.LambdaContainerHandler; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; +import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPEvent; +import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPResponse; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; @@ -29,51 +30,56 @@ import io.netty.handler.codec.http.HttpVersion; import io.netty.handler.codec.http.LastHttpContent; import io.netty.util.ReferenceCountUtil; -import io.quarkus.amazon.lambda.http.model.AwsProxyRequest; -import io.quarkus.amazon.lambda.http.model.AwsProxyResponse; import io.quarkus.amazon.lambda.http.model.Headers; import io.quarkus.netty.runtime.virtual.VirtualClientConnection; import io.quarkus.netty.runtime.virtual.VirtualResponseHandler; +import io.quarkus.vertx.http.runtime.QuarkusHttpHeaders; import io.quarkus.vertx.http.runtime.VertxHttpRecorder; @SuppressWarnings("unused") -public class LambdaHttpHandler implements RequestHandler { +public class LambdaHttpHandler implements RequestHandler { private static final Logger log = Logger.getLogger("quarkus.amazon.lambda.http"); + 
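/*
 * The generic type parameters are not visible in the rendering above; the rewritten handler
 * has approximately this shape, translating the API Gateway v2 HTTP payload into a request
 * on Quarkus' virtual HTTP channel and back. The class name and stubbed body are
 * illustrative only.
 */
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPEvent;
import com.amazonaws.services.lambda.runtime.events.APIGatewayV2HTTPResponse;

class LambdaHttpHandlerShape implements RequestHandler<APIGatewayV2HTTPEvent, APIGatewayV2HTTPResponse> {

    @Override
    public APIGatewayV2HTTPResponse handleRequest(APIGatewayV2HTTPEvent request, Context context) {
        // The real class delegates to nettyDispatch(clientAddress, request, context);
        // stubbed here with an empty 200 response.
        APIGatewayV2HTTPResponse response = new APIGatewayV2HTTPResponse();
        response.setStatusCode(200);
        return response;
    }
}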
private static final int BUFFER_SIZE = 8096; + private static Headers errorHeaders = new Headers(); static { errorHeaders.putSingle("Content-Type", "application/json"); } - public AwsProxyResponse handleRequest(AwsProxyRequest request, Context context) { + public APIGatewayV2HTTPResponse handleRequest(APIGatewayV2HTTPEvent request, Context context) { InetSocketAddress clientAddress = null; - if (request.getRequestContext() != null && request.getRequestContext().getIdentity() != null) { - if (request.getRequestContext().getIdentity().getSourceIp() != null) { - clientAddress = new InetSocketAddress(request.getRequestContext().getIdentity().getSourceIp(), 443); + if (request.getRequestContext() != null && request.getRequestContext().getHttp() != null) { + if (request.getRequestContext().getHttp().getSourceIp() != null) { + clientAddress = new InetSocketAddress(request.getRequestContext().getHttp().getSourceIp(), 443); } } try { - return nettyDispatch(clientAddress, request); + return nettyDispatch(clientAddress, request, context); } catch (Exception e) { log.error("Request Failure", e); - return new AwsProxyResponse(500, errorHeaders, "{ \"message\": \"Internal Server Error\" }"); + APIGatewayV2HTTPResponse res = new APIGatewayV2HTTPResponse(); + res.setStatusCode(500); + res.setBody("{ \"message\": \"Internal Server Error\" }"); + res.setMultiValueHeaders(errorHeaders); + return res; } } private class NettyResponseHandler implements VirtualResponseHandler { - AwsProxyResponse responseBuilder = new AwsProxyResponse(); + APIGatewayV2HTTPResponse responseBuilder = new APIGatewayV2HTTPResponse(); ByteArrayOutputStream baos; WritableByteChannel byteChannel; - final AwsProxyRequest request; - CompletableFuture future = new CompletableFuture<>(); + final APIGatewayV2HTTPEvent request; + CompletableFuture future = new CompletableFuture<>(); - public NettyResponseHandler(AwsProxyRequest request) { + public NettyResponseHandler(APIGatewayV2HTTPEvent request) { this.request = request; } - public CompletableFuture getFuture() { + public CompletableFuture getFuture() { return future; } @@ -86,13 +92,11 @@ public void handleMessage(Object msg) { HttpResponse res = (HttpResponse) msg; responseBuilder.setStatusCode(res.status().code()); - if (request.getRequestSource() == AwsProxyRequest.RequestSource.ALB) { - responseBuilder.setStatusDescription(res.status().reasonPhrase()); - } - responseBuilder.setMultiValueHeaders(new Headers()); + Headers multiValueHeaders = new Headers(); + responseBuilder.setMultiValueHeaders(multiValueHeaders); for (String name : res.headers().names()) { for (String v : res.headers().getAll(name)) { - responseBuilder.getMultiValueHeaders().add(name, v); + multiValueHeaders.add(name, v); } } } @@ -118,11 +122,11 @@ public void handleMessage(Object msg) { } if (msg instanceof LastHttpContent) { if (baos != null) { - if (isBinary(responseBuilder.getMultiValueHeaders().getFirst("Content-Type"))) { - responseBuilder.setBase64Encoded(true); + if (isBinary(((Headers) responseBuilder.getMultiValueHeaders()).getFirst("Content-Type"))) { + responseBuilder.setIsBase64Encoded(true); responseBuilder.setBody(Base64.getMimeEncoder().encodeToString(baos.toByteArray())); } else { - responseBuilder.setBody(new String(baos.toByteArray(), "UTF-8")); + responseBuilder.setBody(new String(baos.toByteArray(), StandardCharsets.UTF_8)); } } future.complete(responseBuilder); @@ -143,38 +147,25 @@ public void close() { } } - private AwsProxyResponse nettyDispatch(InetSocketAddress clientAddress, 
AwsProxyRequest request) throws Exception { - String path = request.getPath(); + private APIGatewayV2HTTPResponse nettyDispatch(InetSocketAddress clientAddress, APIGatewayV2HTTPEvent request, + Context context) + throws Exception { + StringBuilder sb = new StringBuilder(request.getRawPath()); + sb.append('?').append(request.getRawQueryString()); + String path = new StringBuilder(request.getRawPath()).append('?').append(request.getRawQueryString()).toString(); //log.info("---- Got lambda request: " + path); - if (request.getMultiValueQueryStringParameters() != null && !request.getMultiValueQueryStringParameters().isEmpty()) { - StringBuilder sb = new StringBuilder(path); - sb.append("?"); - boolean first = true; - for (Map.Entry> e : request.getMultiValueQueryStringParameters().entrySet()) { - for (String v : e.getValue()) { - if (first) { - first = false; - } else { - sb.append("&"); - } - if (request.getRequestSource() == AwsProxyRequest.RequestSource.ALB) { - sb.append(e.getKey()); - sb.append("="); - sb.append(v); - } else { - sb.append(URLEncoder.encode(e.getKey(), StandardCharsets.UTF_8.name())); - sb.append("="); - sb.append(URLEncoder.encode(v, StandardCharsets.UTF_8.name())); - } - } - } - path = sb.toString(); - } + QuarkusHttpHeaders quarkusHeaders = new QuarkusHttpHeaders(); + quarkusHeaders.setContextObject(Context.class, context); + quarkusHeaders.setContextObject(APIGatewayV2HTTPEvent.class, request); + quarkusHeaders.setContextObject(APIGatewayV2HTTPEvent.RequestContext.class, request.getRequestContext()); DefaultHttpRequest nettyRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, - HttpMethod.valueOf(request.getHttpMethod()), path); - if (request.getMultiValueHeaders() != null) { //apparently this can be null if no headers are sent - for (Map.Entry> header : request.getMultiValueHeaders().entrySet()) { - nettyRequest.headers().add(header.getKey(), header.getValue()); + HttpMethod.valueOf(request.getRequestContext().getHttp().getMethod()), path, quarkusHeaders); + if (request.getHeaders() != null) { //apparently this can be null if no headers are sent + for (Map.Entry header : request.getHeaders().entrySet()) { + if (header.getValue() != null) { + for (String val : header.getValue().split(",")) + nettyRequest.headers().add(header.getKey(), val); + } } } if (!nettyRequest.headers().contains(HttpHeaderNames.HOST)) { @@ -183,7 +174,7 @@ private AwsProxyResponse nettyDispatch(InetSocketAddress clientAddress, AwsProxy HttpContent requestContent = LastHttpContent.EMPTY_LAST_CONTENT; if (request.getBody() != null) { - if (request.isBase64Encoded()) { + if (request.getIsBase64Encoded()) { ByteBuf body = Unpooled.wrappedBuffer(Base64.getMimeDecoder().decode(request.getBody())); requestContent = new DefaultLastHttpContent(body); } else { @@ -205,18 +196,27 @@ private AwsProxyResponse nettyDispatch(InetSocketAddress clientAddress, AwsProxy } private ByteArrayOutputStream createByteStream() { - ByteArrayOutputStream baos;// todo what is right size? 
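/*
 * Aside on the header loop above: the HTTP API v2 payload delivers repeated headers as a
 * single comma-joined string per name, so each value is split on ',' before being copied
 * into the Netty request. A self-contained illustration with made-up values:
 */
import java.util.Map;

class HeaderSplitSketch {
    public static void main(String[] args) {
        Map<String, String> lambdaHeaders = Map.of("accept", "text/html,application/json");
        for (Map.Entry<String, String> header : lambdaHeaders.entrySet()) {
            for (String val : header.getValue().split(",")) {
                // The real code adds each split value as a separate Netty header entry.
                System.out.println(header.getKey() + " -> " + val);
            }
        }
    }
}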
- baos = new ByteArrayOutputStream(1000); + ByteArrayOutputStream baos; + baos = new ByteArrayOutputStream(BUFFER_SIZE); return baos; } + static Set binaryTypes = new HashSet<>(); + + static { + binaryTypes.add("application/octet-stream"); + binaryTypes.add("image/jpeg"); + binaryTypes.add("image/png"); + binaryTypes.add("image/gif"); + } + private boolean isBinary(String contentType) { if (contentType != null) { int index = contentType.indexOf(';'); if (index >= 0) { - return LambdaContainerHandler.getContainerConfig().isBinaryContentType(contentType.substring(0, index)); + return binaryTypes.contains(contentType.substring(0, index)); } else { - return LambdaContainerHandler.getContainerConfig().isBinaryContentType(contentType); + return binaryTypes.contains(contentType); } } return false; diff --git a/extensions/amazon-lambda-rest/deployment/pom.xml b/extensions/amazon-lambda-rest/deployment/pom.xml new file mode 100644 index 0000000000000..bb4da0e13edbc --- /dev/null +++ b/extensions/amazon-lambda-rest/deployment/pom.xml @@ -0,0 +1,51 @@ + + + 4.0.0 + + + io.quarkus + quarkus-amazon-lambda-rest-parent + 999-SNAPSHOT + ../pom.xml + + + quarkus-amazon-lambda-rest-deployment + Quarkus - Amazon Lambda AWS Gateway REST API - Deployment + + + + io.quarkus + quarkus-core-deployment + + + io.quarkus + quarkus-vertx-http-deployment + + + io.quarkus + quarkus-amazon-lambda-deployment + + + io.quarkus + quarkus-amazon-lambda-rest + + + + + + maven-compiler-plugin + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + \ No newline at end of file diff --git a/extensions/amazon-lambda-rest/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java b/extensions/amazon-lambda-rest/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java new file mode 100644 index 0000000000000..14280d0cf7c62 --- /dev/null +++ b/extensions/amazon-lambda-rest/deployment/src/main/java/io/quarkus/amazon/lambda/http/deployment/AmazonLambdaHttpProcessor.java @@ -0,0 +1,83 @@ +package io.quarkus.amazon.lambda.http.deployment; + +import org.jboss.logging.Logger; + +import io.quarkus.amazon.lambda.deployment.LambdaUtil; +import io.quarkus.amazon.lambda.deployment.ProvidedAmazonLambdaHandlerBuildItem; +import io.quarkus.amazon.lambda.http.LambdaHttpHandler; +import io.quarkus.amazon.lambda.http.model.AlbContext; +import io.quarkus.amazon.lambda.http.model.ApiGatewayAuthorizerContext; +import io.quarkus.amazon.lambda.http.model.ApiGatewayRequestIdentity; +import io.quarkus.amazon.lambda.http.model.AwsProxyRequest; +import io.quarkus.amazon.lambda.http.model.AwsProxyRequestContext; +import io.quarkus.amazon.lambda.http.model.AwsProxyResponse; +import io.quarkus.amazon.lambda.http.model.CognitoAuthorizerClaims; +import io.quarkus.amazon.lambda.http.model.ErrorModel; +import io.quarkus.amazon.lambda.http.model.Headers; +import io.quarkus.amazon.lambda.http.model.MultiValuedTreeMap; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.builditem.SystemPropertyBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; +import io.quarkus.deployment.pkg.builditem.OutputTargetBuildItem; +import io.quarkus.runtime.LaunchMode; +import 
io.quarkus.vertx.http.deployment.RequireVirtualHttpBuildItem; +import io.vertx.core.file.impl.FileResolver; + +public class AmazonLambdaHttpProcessor { + private static final Logger log = Logger.getLogger(AmazonLambdaHttpProcessor.class); + + @BuildStep + public RequireVirtualHttpBuildItem requestVirtualHttp(LaunchModeBuildItem launchMode) { + return launchMode.getLaunchMode() == LaunchMode.NORMAL ? RequireVirtualHttpBuildItem.MARKER : null; + } + + @BuildStep + public ProvidedAmazonLambdaHandlerBuildItem setHandler() { + return new ProvidedAmazonLambdaHandlerBuildItem(LambdaHttpHandler.class, "AWS Lambda HTTP"); + } + + @BuildStep + public void registerReflectionClasses(BuildProducer reflectiveClassBuildItemBuildProducer) { + reflectiveClassBuildItemBuildProducer + .produce(new ReflectiveClassBuildItem(true, true, true, + AlbContext.class, + ApiGatewayAuthorizerContext.class, + ApiGatewayRequestIdentity.class, + AwsProxyRequest.class, + AwsProxyRequestContext.class, + AwsProxyResponse.class, + CognitoAuthorizerClaims.class, + ErrorModel.class, + Headers.class, + MultiValuedTreeMap.class)); + } + + /** + * Lambda provides /tmp for temporary files. Set vertx cache dir + */ + @BuildStep + void setTempDir(BuildProducer systemProperty) { + systemProperty.produce(new SystemPropertyBuildItem(FileResolver.CACHE_DIR_BASE_PROP_NAME, "/tmp")); + } + + @BuildStep + public void generateScripts(OutputTargetBuildItem target, + BuildProducer artifactResultProducer) throws Exception { + String lambdaName = LambdaUtil.artifactToLambda(target.getBaseName()); + + String output = LambdaUtil.copyResource("lambda/bootstrap-example.sh"); + LambdaUtil.writeFile(target, "bootstrap-example.sh", output); + + output = LambdaUtil.copyResource("http/sam.jvm.yaml") + .replace("${lambdaName}", lambdaName); + LambdaUtil.writeFile(target, "sam.jvm.yaml", output); + + output = LambdaUtil.copyResource("http/sam.native.yaml") + .replace("${lambdaName}", lambdaName); + LambdaUtil.writeFile(target, "sam.native.yaml", output); + } + +} diff --git a/extensions/amazon-lambda-rest/deployment/src/main/resources/http/sam.jvm.yaml b/extensions/amazon-lambda-rest/deployment/src/main/resources/http/sam.jvm.yaml new file mode 100644 index 0000000000000..a2476a2a1a648 --- /dev/null +++ b/extensions/amazon-lambda-rest/deployment/src/main/resources/http/sam.jvm.yaml @@ -0,0 +1,32 @@ + AWSTemplateFormatVersion: '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: AWS Serverless Quarkus HTTP - ${artifactId} + Globals: + Api: + EndpointConfiguration: REGIONAL + BinaryMediaTypes: + - "*/*" + + Resources: + ${lambdaName}: + Type: AWS::Serverless::Function + Properties: + Handler: io.quarkus.amazon.lambda.runtime.QuarkusStreamHandler::handleRequest + Runtime: java11 + CodeUri: function.zip + MemorySize: 512 + Policies: AWSLambdaBasicExecutionRole + Timeout: 15 + Events: + GetResource: + Type: Api + Properties: + Path: /{proxy+} + Method: any + + Outputs: + ${lambdaName}Api: + Description: URL for application + Value: !Sub 'https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/' + Export: + Name: ${lambdaName}Api diff --git a/extensions/amazon-lambda-rest/deployment/src/main/resources/http/sam.native.yaml b/extensions/amazon-lambda-rest/deployment/src/main/resources/http/sam.native.yaml new file mode 100644 index 0000000000000..80d6075f0a3b3 --- /dev/null +++ b/extensions/amazon-lambda-rest/deployment/src/main/resources/http/sam.native.yaml @@ -0,0 +1,35 @@ + AWSTemplateFormatVersion: '2010-09-09' + Transform: 
AWS::Serverless-2016-10-31 + Description: AWS Serverless Quarkus HTTP - ${artifactId} + Globals: + Api: + EndpointConfiguration: REGIONAL + BinaryMediaTypes: + - "*/*" + + Resources: + ${lambdaName}Native: + Type: AWS::Serverless::Function + Properties: + Handler: not.used.in.provided.runtimei + Runtime: provided + CodeUri: function.zip + MemorySize: 128 + Policies: AWSLambdaBasicExecutionRole + Timeout: 15 + Environment: + Variables: + DISABLE_SIGNAL_HANDLERS: true + Events: + GetResource: + Type: Api + Properties: + Path: /{proxy+} + Method: any + + Outputs: + ${lambdaName}NativeApi: + Description: URL for application + Value: !Sub 'https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/' + Export: + Name: ${lambdaName}NativeApi diff --git a/extensions/amazon-lambda-rest/maven-archetype/pom.xml b/extensions/amazon-lambda-rest/maven-archetype/pom.xml new file mode 100644 index 0000000000000..326475ac1feed --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/pom.xml @@ -0,0 +1,65 @@ + + + + quarkus-amazon-lambda-rest-parent + io.quarkus + 999-SNAPSHOT + + 4.0.0 + + quarkus-amazon-lambda-rest-archetype + Quarkus - Amazon Lambda AWS Gateway REST API - Archetype + maven-archetype + + + + + src/main/resources + true + + archetype-resources/pom.xml + + + + src/main/resources + false + + archetype-resources/pom.xml + + + + + + org.apache.maven.archetype + archetype-packaging + 3.0.1 + + + + + + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + \ + + + + org.apache.maven.plugins + maven-archetype-plugin + 3.0.1 + + + + integration-test + + + + + + + + diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml new file mode 100644 index 0000000000000..b1292683a4bb4 --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml @@ -0,0 +1,39 @@ + + + + + + + + src/main/java + + + src/test/java + + + src/assembly + + * + + + + diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/pom.xml b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/pom.xml new file mode 100644 index 0000000000000..2219570cb59c7 --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/pom.xml @@ -0,0 +1,113 @@ + + + 4.0.0 + \${groupId} + \${artifactId} + \${version} + + 3.1.0 + 3.8.1 + true + 1.8 + 1.8 + UTF-8 + UTF-8 + 999-SNAPSHOT + quarkus-bom + io.quarkus + 999-SNAPSHOT + 3.0.0-M5 + + + + + \${quarkus.platform.group-id} + \${quarkus.platform.artifact-id} + \${quarkus.platform.version} + pom + import + + + + + + + io.quarkus + quarkus-resteasy + + + + io.quarkus + quarkus-undertow + + + + io.quarkus + quarkus-vertx-web + + + + io.quarkus + quarkus-funqy-http + + + io.quarkus + quarkus-amazon-lambda-rest + + + io.quarkus + quarkus-junit5 + test + + + io.rest-assured + rest-assured + test + + + + + + io.quarkus + quarkus-maven-plugin + \${quarkus-plugin.version} + + + + build + + + + + + maven-compiler-plugin + \${compiler-plugin.version} + + + maven-surefire-plugin + \${surefire-plugin.version} + + + org.jboss.logmanager.LogManager + \${maven.home} + + + + + + + + native + + + native + + + + native + + + + diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingFunction.java 
b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingFunction.java new file mode 100644 index 0000000000000..a6b00f53041eb --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingFunction.java @@ -0,0 +1,11 @@ +package ${package}; + +import io.quarkus.funqy.Funq; + +public class GreetingFunction { + + @Funq + public String funqyHello() { + return "hello funqy"; + } +} diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingResource.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingResource.java new file mode 100644 index 0000000000000..63cd0cdcc30ea --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingResource.java @@ -0,0 +1,16 @@ +package ${package}; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +@Path("/hello") +public class GreetingResource { + + @GET + @Produces(MediaType.TEXT_PLAIN) + public String hello() { + return "hello jaxrs"; + } +} diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingServlet.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingServlet.java new file mode 100644 index 0000000000000..9ea933ac90f4f --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingServlet.java @@ -0,0 +1,19 @@ +package ${package}; + +import java.io.IOException; + +import javax.servlet.ServletException; +import javax.servlet.annotation.WebServlet; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +@WebServlet(name = "ServletGreeting", urlPatterns = "/servlet/hello") +public class GreetingServlet extends HttpServlet { + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + resp.setStatus(200); + resp.addHeader("Content-Type", "text/plain"); + resp.getWriter().write("hello servlet"); + } +} diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java new file mode 100644 index 0000000000000..d3787e91a4a82 --- /dev/null +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java @@ -0,0 +1,14 @@ +package ${package}; + +import static io.vertx.core.http.HttpMethod.*; + +import io.quarkus.vertx.web.Route; +import io.vertx.ext.web.RoutingContext; + +public class GreetingVertx { + @Route(path = "/vertx/hello", methods = GET) + void hello(RoutingContext context) { + context.response().headers().set("Content-Type", "text/plain"); + context.response().setStatusCode(200).end("hello vertx"); + } +} diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/test/java/GreetingTest.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/test/java/GreetingTest.java new file mode 100644 index 0000000000000..48e1b89cfade9 --- /dev/null 
+++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/test/java/GreetingTest.java @@ -0,0 +1,40 @@ +package ${package}; + +import io.quarkus.test.junit.QuarkusTest; +import io.restassured.RestAssured; +import org.junit.jupiter.api.Test; + +import static org.hamcrest.Matchers.equalTo; + +@QuarkusTest +public class GreetingTest +{ + @Test + public void testJaxrs() { + RestAssured.when().get("/hello").then() + .contentType("text/plain") + .body(equalTo("hello jaxrs")); + } + + @Test + public void testServlet() { + RestAssured.when().get("/servlet/hello").then() + .contentType("text/plain") + .body(equalTo("hello servlet")); + } + + @Test + public void testVertx() { + RestAssured.when().get("/vertx/hello").then() + .contentType("text/plain") + .body(equalTo("hello vertx")); + } + + @Test + public void testFunqy() { + RestAssured.when().get("/funqyHello").then() + .contentType("application/json") + .body(equalTo("\"hello funqy\"")); + } + +} diff --git a/extensions/amazon-lambda-rest/pom.xml b/extensions/amazon-lambda-rest/pom.xml new file mode 100644 index 0000000000000..34cb1cf262363 --- /dev/null +++ b/extensions/amazon-lambda-rest/pom.xml @@ -0,0 +1,24 @@ + + + 4.0.0 + + + quarkus-extensions-parent + io.quarkus + 999-SNAPSHOT + ../pom.xml + + + quarkus-amazon-lambda-rest-parent + Quarkus - Amazon Lambda AWS Gateway REST API + pom + + + runtime + deployment + maven-archetype + + + \ No newline at end of file diff --git a/extensions/amazon-lambda-rest/runtime/pom.xml b/extensions/amazon-lambda-rest/runtime/pom.xml new file mode 100644 index 0000000000000..a6e0f2a074f04 --- /dev/null +++ b/extensions/amazon-lambda-rest/runtime/pom.xml @@ -0,0 +1,61 @@ + + + 4.0.0 + + + io.quarkus + quarkus-amazon-lambda-rest-parent + 999-SNAPSHOT + ../pom.xml + + + quarkus-amazon-lambda-rest + Quarkus - Amazon Lambda AWS Gateway REST API- Runtime + + + + io.quarkus + quarkus-vertx-http + + + io.quarkus + quarkus-amazon-lambda + + + io.quarkus + quarkus-core + + + com.amazonaws.serverless + aws-serverless-java-container-core + + + org.graalvm.nativeimage + svm + provided + + + + + + + io.quarkus + quarkus-bootstrap-maven-plugin + + + maven-compiler-plugin + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + diff --git a/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java new file mode 100644 index 0000000000000..484264e245e4d --- /dev/null +++ b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/LambdaHttpHandler.java @@ -0,0 +1,233 @@ +package io.quarkus.amazon.lambda.http; + +import java.io.ByteArrayOutputStream; +import java.net.InetSocketAddress; +import java.net.URLEncoder; +import java.nio.channels.Channels; +import java.nio.channels.WritableByteChannel; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +import org.jboss.logging.Logger; + +import com.amazonaws.serverless.proxy.internal.LambdaContainerHandler; +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.FileRegion; +import io.netty.handler.codec.http.DefaultHttpRequest; +import 
io.netty.handler.codec.http.DefaultLastHttpContent; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; +import io.netty.util.ReferenceCountUtil; +import io.quarkus.amazon.lambda.http.model.AwsProxyRequest; +import io.quarkus.amazon.lambda.http.model.AwsProxyRequestContext; +import io.quarkus.amazon.lambda.http.model.AwsProxyResponse; +import io.quarkus.amazon.lambda.http.model.Headers; +import io.quarkus.netty.runtime.virtual.VirtualClientConnection; +import io.quarkus.netty.runtime.virtual.VirtualResponseHandler; +import io.quarkus.vertx.http.runtime.QuarkusHttpHeaders; +import io.quarkus.vertx.http.runtime.VertxHttpRecorder; + +@SuppressWarnings("unused") +public class LambdaHttpHandler implements RequestHandler { + private static final Logger log = Logger.getLogger("quarkus.amazon.lambda.http"); + + private static final int BUFFER_SIZE = 8096; + + private static Headers errorHeaders = new Headers(); + static { + errorHeaders.putSingle("Content-Type", "application/json"); + } + + public AwsProxyResponse handleRequest(AwsProxyRequest request, Context context) { + InetSocketAddress clientAddress = null; + if (request.getRequestContext() != null && request.getRequestContext().getIdentity() != null) { + if (request.getRequestContext().getIdentity().getSourceIp() != null) { + clientAddress = new InetSocketAddress(request.getRequestContext().getIdentity().getSourceIp(), 443); + } + } + + try { + return nettyDispatch(clientAddress, request, context); + } catch (Exception e) { + log.error("Request Failure", e); + return new AwsProxyResponse(500, errorHeaders, "{ \"message\": \"Internal Server Error\" }"); + } + + } + + private class NettyResponseHandler implements VirtualResponseHandler { + AwsProxyResponse responseBuilder = new AwsProxyResponse(); + ByteArrayOutputStream baos; + WritableByteChannel byteChannel; + final AwsProxyRequest request; + CompletableFuture future = new CompletableFuture<>(); + + public NettyResponseHandler(AwsProxyRequest request) { + this.request = request; + } + + public CompletableFuture getFuture() { + return future; + } + + @Override + public void handleMessage(Object msg) { + try { + //log.info("Got message: " + msg.getClass().getName()); + + if (msg instanceof HttpResponse) { + HttpResponse res = (HttpResponse) msg; + responseBuilder.setStatusCode(res.status().code()); + + if (request.getRequestSource() == AwsProxyRequest.RequestSource.ALB) { + responseBuilder.setStatusDescription(res.status().reasonPhrase()); + } + responseBuilder.setMultiValueHeaders(new Headers()); + for (String name : res.headers().names()) { + for (String v : res.headers().getAll(name)) { + responseBuilder.getMultiValueHeaders().add(name, v); + } + } + } + if (msg instanceof HttpContent) { + HttpContent content = (HttpContent) msg; + int readable = content.content().readableBytes(); + if (baos == null && readable > 0) { + baos = createByteStream(); + } + for (int i = 0; i < readable; i++) { + baos.write(content.content().readByte()); + } + } + if (msg instanceof FileRegion) { + FileRegion file = (FileRegion) msg; + if (file.count() > 0 && file.transferred() < file.count()) { + if (baos == null) + baos = createByteStream(); + if (byteChannel == null) + byteChannel = Channels.newChannel(baos); + file.transferTo(byteChannel, file.transferred()); + } 
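/*
 * As with the HTTP (payload v2) extension earlier, the type parameters were lost in this
 * rendering: the REST variant keeps the v1 proxy model and implements approximately
 * RequestHandler<AwsProxyRequest, AwsProxyResponse>. The class name and stub body are
 * illustrative only.
 */
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;

import io.quarkus.amazon.lambda.http.model.AwsProxyRequest;
import io.quarkus.amazon.lambda.http.model.AwsProxyResponse;
import io.quarkus.amazon.lambda.http.model.Headers;

class LambdaRestHandlerShape implements RequestHandler<AwsProxyRequest, AwsProxyResponse> {

    @Override
    public AwsProxyResponse handleRequest(AwsProxyRequest request, Context context) {
        // The real class dispatches to the virtual HTTP channel; stubbed here.
        return new AwsProxyResponse(200, new Headers(), "");
    }
}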
+ } + if (msg instanceof LastHttpContent) { + if (baos != null) { + if (isBinary(responseBuilder.getMultiValueHeaders().getFirst("Content-Type"))) { + responseBuilder.setBase64Encoded(true); + responseBuilder.setBody(Base64.getMimeEncoder().encodeToString(baos.toByteArray())); + } else { + responseBuilder.setBody(new String(baos.toByteArray(), StandardCharsets.UTF_8)); + } + } + future.complete(responseBuilder); + } + } catch (Throwable ex) { + future.completeExceptionally(ex); + } finally { + if (msg != null) { + ReferenceCountUtil.release(msg); + } + } + } + + @Override + public void close() { + if (!future.isDone()) + future.completeExceptionally(new RuntimeException("Connection closed")); + } + } + + private AwsProxyResponse nettyDispatch(InetSocketAddress clientAddress, AwsProxyRequest request, Context context) + throws Exception { + String path = request.getPath(); + //log.info("---- Got lambda request: " + path); + if (request.getMultiValueQueryStringParameters() != null && !request.getMultiValueQueryStringParameters().isEmpty()) { + StringBuilder sb = new StringBuilder(path); + sb.append("?"); + boolean first = true; + for (Map.Entry> e : request.getMultiValueQueryStringParameters().entrySet()) { + for (String v : e.getValue()) { + if (first) { + first = false; + } else { + sb.append("&"); + } + if (request.getRequestSource() == AwsProxyRequest.RequestSource.ALB) { + sb.append(e.getKey()); + sb.append("="); + sb.append(v); + } else { + sb.append(URLEncoder.encode(e.getKey(), StandardCharsets.UTF_8.name())); + sb.append("="); + sb.append(URLEncoder.encode(v, StandardCharsets.UTF_8.name())); + } + } + } + path = sb.toString(); + } + QuarkusHttpHeaders quarkusHeaders = new QuarkusHttpHeaders(); + quarkusHeaders.setContextObject(Context.class, context); + quarkusHeaders.setContextObject(AwsProxyRequestContext.class, request.getRequestContext()); + DefaultHttpRequest nettyRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, + HttpMethod.valueOf(request.getHttpMethod()), path, quarkusHeaders); + if (request.getMultiValueHeaders() != null) { //apparently this can be null if no headers are sent + for (Map.Entry> header : request.getMultiValueHeaders().entrySet()) { + nettyRequest.headers().add(header.getKey(), header.getValue()); + } + } + if (!nettyRequest.headers().contains(HttpHeaderNames.HOST)) { + nettyRequest.headers().add(HttpHeaderNames.HOST, "localhost"); + } + + HttpContent requestContent = LastHttpContent.EMPTY_LAST_CONTENT; + if (request.getBody() != null) { + if (request.isBase64Encoded()) { + ByteBuf body = Unpooled.wrappedBuffer(Base64.getMimeDecoder().decode(request.getBody())); + requestContent = new DefaultLastHttpContent(body); + } else { + ByteBuf body = Unpooled.copiedBuffer(request.getBody(), StandardCharsets.UTF_8); //TODO: do we need to look at the request encoding? 
+ requestContent = new DefaultLastHttpContent(body); + } + } + NettyResponseHandler handler = new NettyResponseHandler(request); + VirtualClientConnection connection = VirtualClientConnection.connect(handler, VertxHttpRecorder.VIRTUAL_HTTP, + clientAddress); + + connection.sendMessage(nettyRequest); + connection.sendMessage(requestContent); + try { + return handler.getFuture().get(); + } finally { + connection.close(); + } + } + + private ByteArrayOutputStream createByteStream() { + ByteArrayOutputStream baos; + baos = new ByteArrayOutputStream(BUFFER_SIZE); + return baos; + } + + private boolean isBinary(String contentType) { + if (contentType != null) { + int index = contentType.indexOf(';'); + if (index >= 0) { + return LambdaContainerHandler.getContainerConfig().isBinaryContentType(contentType.substring(0, index)); + } else { + return LambdaContainerHandler.getContainerConfig().isBinaryContentType(contentType); + } + } + return false; + } + +} diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AlbContext.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AlbContext.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AlbContext.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AlbContext.java diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayAuthorizerContext.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayAuthorizerContext.java similarity index 96% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayAuthorizerContext.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayAuthorizerContext.java index a66872f72cbe2..293ea5f7db49a 100644 --- a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayAuthorizerContext.java +++ b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayAuthorizerContext.java @@ -43,6 +43,10 @@ public class ApiGatewayAuthorizerContext { //------------------------------------------------------------- @JsonAnyGetter + public Map getContextProperties() { + return contextProperties; + } + public String getContextValue(String key) { return contextProperties.get(key); } diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayRequestIdentity.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayRequestIdentity.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayRequestIdentity.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ApiGatewayRequestIdentity.java diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequest.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequest.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequest.java rename to 
extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequest.java diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequestContext.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequestContext.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequestContext.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyRequestContext.java diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyResponse.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyResponse.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyResponse.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/AwsProxyResponse.java diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/CognitoAuthorizerClaims.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/CognitoAuthorizerClaims.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/CognitoAuthorizerClaims.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/CognitoAuthorizerClaims.java diff --git a/extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ErrorModel.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ErrorModel.java similarity index 100% rename from extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ErrorModel.java rename to extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/ErrorModel.java diff --git a/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/Headers.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/Headers.java new file mode 100644 index 0000000000000..f9115351d2835 --- /dev/null +++ b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/Headers.java @@ -0,0 +1,10 @@ +package io.quarkus.amazon.lambda.http.model; + +public class Headers extends MultiValuedTreeMap { + + private static final long serialVersionUID = 42L; + + public Headers() { + super(String.CASE_INSENSITIVE_ORDER); + } +} diff --git a/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/MultiValuedTreeMap.java b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/MultiValuedTreeMap.java new file mode 100644 index 0000000000000..2ee1669bf7198 --- /dev/null +++ b/extensions/amazon-lambda-rest/runtime/src/main/java/io/quarkus/amazon/lambda/http/model/MultiValuedTreeMap.java @@ -0,0 +1,201 @@ +package io.quarkus.amazon.lambda.http.model; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +/** + * Simple implementation of a multi valued tree map to use for 
case-insensitive headers + * + * @param The type for the map key + * @param The type for the map values + */ +public class MultiValuedTreeMap implements Map>, Serializable, Cloneable { + + private static final long serialVersionUID = 42L; + + private final Map> map; + + public MultiValuedTreeMap() { + map = new TreeMap<>(); + } + + public MultiValuedTreeMap(Comparator comparator) { + map = new TreeMap<>(comparator); + } + + public void add(Key key, Value value) { + List values = findKey(key); + values.add(value); + } + + public Value getFirst(Key key) { + List values = get(key); + if (values == null || values.size() == 0) { + return null; + } + return values.get(0); + } + + public void putSingle(Key key, Value value) { + List values = findKey(key); + values.clear(); + values.add(value); + } + + @Override + public void clear() { + map.clear(); + } + + @Override + public boolean containsKey(Object key) { + return map.containsKey(key); + } + + @Override + public boolean containsValue(Object value) { + return map.containsValue(value); + } + + @Override + public Set>> entrySet() { + return map.entrySet(); + } + + public boolean equals(Object o) { + return map.equals(o); + } + + @Override + public List get(Object key) { + return map.get(key); + } + + public int hashCode() { + return map.hashCode(); + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + @Override + public Set keySet() { + return map.keySet(); + } + + @Override + public List put(Key key, List value) { + return map.put(key, value); + } + + @Override + public void putAll(Map> t) { + map.putAll(t); + } + + @Override + public List remove(Object key) { + return map.remove(key); + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Collection> values() { + return map.values(); + } + + public void addAll(Key key, Value... 
newValues) { + for (Value value : newValues) { + add(key, value); + } + } + + public void addAll(Key key, List valueList) { + for (Value value : valueList) { + add(key, value); + } + } + + public void addFirst(Key key, Value value) { + List values = get(key); + if (values == null) { + add(key, value); + return; + } else { + values.add(0, value); + } + } + + public boolean equalsIgnoreValueOrder(MultiValuedTreeMap vmap) { + if (this == vmap) { + return true; + } + if (!keySet().equals(vmap.keySet())) { + return false; + } + for (Entry> e : entrySet()) { + List olist = vmap.get(e.getKey()); + if (e.getValue().size() != olist.size()) { + return false; + } + for (Value v : e.getValue()) { + if (!olist.contains(v)) { + return false; + } + } + } + return true; + } + + private List findKey(Key key) { + List values = this.get(key); + if (values == null) { + values = new ArrayList<>(); + put(key, values); + } + return values; + } + + @Override + public MultiValuedTreeMap clone() { + MultiValuedTreeMap clone = new MultiValuedTreeMap<>(); + for (Key key : keySet()) { + List value = get(key); + List newValue = new ArrayList<>(value); + clone.put(key, newValue); + } + return clone; + } + + public String toString() { + StringBuilder result = new StringBuilder(); + String delim = ","; + for (Object name : keySet()) { + for (Object value : get(name)) { + result.append(delim); + if (name == null) { + result.append("null"); //$NON-NLS-1$ + } else { + result.append(name.toString()); + } + if (value != null) { + result.append('='); + result.append(value.toString()); + } + } + } + return "[" + result.toString() + "]"; + } +} diff --git a/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml new file mode 100644 index 0000000000000..01e578e12c75e --- /dev/null +++ b/extensions/amazon-lambda-rest/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -0,0 +1,13 @@ +--- +name: "AWS Lambda Gateway REST API" +metadata: + keywords: + - "lambda" + - "aws" + - "amazon" + - "http" + - "rest" + categories: + - "cloud" + guide: "https://quarkus.io/guides/amazon-lambda-http" + status: "preview" diff --git a/extensions/amazon-lambda-xray/deployment/pom.xml b/extensions/amazon-lambda-xray/deployment/pom.xml index 82361891992a8..96bff089902ee 100644 --- a/extensions/amazon-lambda-xray/deployment/pom.xml +++ b/extensions/amazon-lambda-xray/deployment/pom.xml @@ -23,10 +23,6 @@ io.quarkus quarkus-amazon-lambda-xray - - org.graalvm.nativeimage - svm - diff --git a/extensions/amazon-lambda-xray/pom.xml b/extensions/amazon-lambda-xray/pom.xml index 7be7d2070003d..017de6ed72fad 100644 --- a/extensions/amazon-lambda-xray/pom.xml +++ b/extensions/amazon-lambda-xray/pom.xml @@ -5,10 +5,10 @@ 4.0.0 - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml quarkus-amazon-lambda-xray-parent diff --git a/extensions/amazon-lambda-xray/runtime/pom.xml b/extensions/amazon-lambda-xray/runtime/pom.xml index abf9c24e25746..a219a49bdb9b0 100644 --- a/extensions/amazon-lambda-xray/runtime/pom.xml +++ b/extensions/amazon-lambda-xray/runtime/pom.xml @@ -31,6 +31,7 @@ org.graalvm.nativeimage svm + provided diff --git a/extensions/amazon-lambda-xray/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda-xray/runtime/src/main/resources/META-INF/quarkus-extension.yaml index bbb79f645a17c..a0b9c03843af7 100644 --- 
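/*
 * Usage sketch for the classes above: Headers is a MultiValuedTreeMap of String to String
 * built with String.CASE_INSENSITIVE_ORDER, so header-name lookups ignore case and repeated
 * names accumulate values under one key. The values below are made up.
 */
import io.quarkus.amazon.lambda.http.model.Headers;

class HeadersUsageSketch {
    public static void main(String[] args) {
        Headers headers = new Headers();
        headers.putSingle("Content-Type", "application/json");
        headers.add("Set-Cookie", "a=1");
        headers.add("set-cookie", "b=2");
        System.out.println(headers.getFirst("content-type")); // application/json
        System.out.println(headers.get("SET-COOKIE").size()); // 2 - both cookies share one key
    }
}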
a/extensions/amazon-lambda-xray/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/amazon-lambda-xray/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -6,6 +6,7 @@ metadata: - "aws" - "amazon" - "xray" + guide: "https://quarkus.io/guides/amazon-lambda#tracing-with-aws-xray-and-graalvm" categories: - "cloud" status: "preview" diff --git a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaCommonProcessor.java b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaCommonProcessor.java index 93ead288f693a..df692697ed573 100644 --- a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaCommonProcessor.java +++ b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaCommonProcessor.java @@ -14,12 +14,21 @@ import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.LaunchModeBuildItem; import io.quarkus.deployment.builditem.SystemPropertyBuildItem; +import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; import io.quarkus.deployment.pkg.steps.NativeBuild; -import io.quarkus.jackson.ObjectMapperProducer; +import io.quarkus.deployment.pkg.steps.NativeSourcesBuild; +import io.quarkus.jackson.runtime.ObjectMapperProducer; import io.quarkus.runtime.LaunchMode; @SuppressWarnings("unchecked") public final class AmazonLambdaCommonProcessor { + + @BuildStep(onlyIf = NativeSourcesBuild.class) + void failForNativeSources(BuildProducer artifactResultProducer) { + throw new IllegalArgumentException( + "The Amazon Lambda extensions are incompatible with the 'native-sources' package type."); + } + /** * Lambda custom runtime does not like ipv6. */ diff --git a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/FunctionZipProcessor.java b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/FunctionZipProcessor.java index 436139236c923..bcc32c6967a36 100644 --- a/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/FunctionZipProcessor.java +++ b/extensions/amazon-lambda/common-deployment/src/main/java/io/quarkus/amazon/lambda/deployment/FunctionZipProcessor.java @@ -19,19 +19,25 @@ import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; import io.quarkus.deployment.pkg.builditem.JarBuildItem; +import io.quarkus.deployment.pkg.builditem.LegacyJarRequiredBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageBuildItem; import io.quarkus.deployment.pkg.builditem.OutputTargetBuildItem; import io.quarkus.deployment.pkg.steps.NativeBuild; /** - * Generate deployoment package zip for lambda. + * Generate deployment package zip for lambda. 
* */ public class FunctionZipProcessor { private static final Logger log = Logger.getLogger(FunctionZipProcessor.class); + @BuildStep(onlyIf = IsNormal.class, onlyIfNot = NativeBuild.class) + public void requireLegacy(BuildProducer required) { + required.produce(new LegacyJarRequiredBuildItem()); + } + /** - * Function.zip is same as the jar plus dependencies in lib/ if not uberjar + * Function.zip is same as the runner jar plus dependencies in lib/ * plus anything in src/main/zip.jvm * * @param target diff --git a/extensions/amazon-lambda/common-deployment/src/main/resources/lambda/manage.sh b/extensions/amazon-lambda/common-deployment/src/main/resources/lambda/manage.sh index c4f0f732e15a2..6990202eca9ea 100644 --- a/extensions/amazon-lambda/common-deployment/src/main/resources/lambda/manage.sh +++ b/extensions/amazon-lambda/common-deployment/src/main/resources/lambda/manage.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash function cmd_create() { echo Creating function @@ -55,12 +55,12 @@ RUNTIME=java11 ZIP_FILE=${targetUri} function usage() { - [ "_$1" == "_" ] && echo "\nUsage (JVM): \n$0 [create|delete|invoke]\ne.g.: $0 invoke" - [ "_$1" == "_" ] && echo "\nUsage (Native): \n$0 native [create|delete|invoke]\ne.g.: $0 native invoke" + [ "_$1" == "_" ] && echo -e "\nUsage (JVM): \n$0 [create|delete|invoke]\ne.g.: $0 invoke" + [ "_$1" == "_" ] && echo -e "\nUsage (Native): \n$0 native [create|delete|invoke]\ne.g.: $0 native invoke" - [ "_" == "_`which aws 2>/dev/null`" ] && echo "\naws CLI not installed. Please see https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html" - [ ! -e $HOME/.aws/credentials ] && [ "_$AWS_ACCESS_KEY_ID" == "_" ] && echo "\naws configure not setup. Please execute: aws configure" - [ "_$LAMBDA_ROLE_ARN" == "_" ] && echo "\nEnvironment variable must be set: LAMBDA_ROLE_ARN\ne.g.: export LAMBDA_ROLE_ARN=arn:aws:iam::123456789012:role/my-example-role" + [ "_" == "_`which aws 2>/dev/null`" ] && echo -e "\naws CLI not installed. Please see https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html" + [ ! -e $HOME/.aws/credentials ] && [ "_$AWS_ACCESS_KEY_ID" == "_" ] && echo -e "\naws configure not setup. 
Please execute: aws configure" + [ "_$LAMBDA_ROLE_ARN" == "_" ] && echo -e "\nEnvironment variable must be set: LAMBDA_ROLE_ARN\ne.g.: export LAMBDA_ROLE_ARN=arn:aws:iam::123456789012:role/my-example-role" } if [ "_$1" == "_" ] || [ "$1" == "help" ] diff --git a/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AbstractLambdaPollLoop.java b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AbstractLambdaPollLoop.java index 347f0b3d4c845..0777ce720e8a6 100644 --- a/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AbstractLambdaPollLoop.java +++ b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AbstractLambdaPollLoop.java @@ -14,7 +14,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.ObjectWriter; import io.quarkus.runtime.Application; import io.quarkus.runtime.ShutdownContext; @@ -37,7 +36,6 @@ public AbstractLambdaPollLoop(ObjectMapper objectMapper, ObjectReader cognitoIdR public void startPollLoop(ShutdownContext context) { final AtomicBoolean running = new AtomicBoolean(true); - final Thread pollingThread = new Thread(new Runnable() { @SuppressWarnings("unchecked") @Override @@ -123,8 +121,11 @@ public void run() { } } }, "Lambda Thread"); + pollingThread.setDaemon(true); context.addShutdownTask(() -> { running.set(false); + //note that this does not seem to be 100% reliable in unblocking the thread + //which is why it is a daemon. pollingThread.interrupt(); }); pollingThread.start(); @@ -144,9 +145,9 @@ public void run() { protected abstract void processRequest(InputStream input, OutputStream output, AmazonLambdaContext context) throws Exception; - protected abstract ObjectReader getInputReader(); + protected abstract LambdaInputReader getInputReader(); - protected abstract ObjectWriter getOutputWriter(); + protected abstract LambdaOutputWriter getOutputWriter(); protected AmazonLambdaContext createContext(HttpURLConnection requestConnection) throws IOException { return new AmazonLambdaContext(requestConnection, cognitoIdReader, clientCtxReader); @@ -202,4 +203,4 @@ boolean abortGracefully(Exception ex) { return graceful; } -} \ No newline at end of file +} diff --git a/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonInputReader.java b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonInputReader.java new file mode 100644 index 0000000000000..e66a774548c7b --- /dev/null +++ b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonInputReader.java @@ -0,0 +1,19 @@ +package io.quarkus.amazon.lambda.runtime; + +import java.io.IOException; +import java.io.InputStream; + +import com.fasterxml.jackson.databind.ObjectReader; + +public class JacksonInputReader implements LambdaInputReader { + final private ObjectReader reader; + + public JacksonInputReader(ObjectReader reader) { + this.reader = reader; + } + + @Override + public Object readValue(InputStream is) throws IOException { + return reader.readValue(is); + } +} diff --git a/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonOutputWriter.java b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonOutputWriter.java new file mode 100644 index 0000000000000..d950a57f741cf --- /dev/null 
+++ b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonOutputWriter.java @@ -0,0 +1,19 @@ +package io.quarkus.amazon.lambda.runtime; + +import java.io.IOException; +import java.io.OutputStream; + +import com.fasterxml.jackson.databind.ObjectWriter; + +public class JacksonOutputWriter implements LambdaOutputWriter { + final private ObjectWriter writer; + + public JacksonOutputWriter(ObjectWriter writer) { + this.writer = writer; + } + + @Override + public void writeValue(OutputStream os, Object obj) throws IOException { + writer.writeValue(os, obj); + } +} diff --git a/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/LambdaInputReader.java b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/LambdaInputReader.java new file mode 100644 index 0000000000000..ff9574d608715 --- /dev/null +++ b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/LambdaInputReader.java @@ -0,0 +1,8 @@ +package io.quarkus.amazon.lambda.runtime; + +import java.io.IOException; +import java.io.InputStream; + +public interface LambdaInputReader { + T readValue(InputStream is) throws IOException; +} diff --git a/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/LambdaOutputWriter.java b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/LambdaOutputWriter.java new file mode 100644 index 0000000000000..d9de5be4e9ff0 --- /dev/null +++ b/extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/LambdaOutputWriter.java @@ -0,0 +1,8 @@ +package io.quarkus.amazon.lambda.runtime; + +import java.io.IOException; +import java.io.OutputStream; + +public interface LambdaOutputWriter { + void writeValue(OutputStream os, Object obj) throws IOException; +} diff --git a/extensions/amazon-lambda/common-runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda/common-runtime/src/main/resources/META-INF/quarkus-extension.yaml new file mode 100644 index 0000000000000..135c2caef5587 --- /dev/null +++ b/extensions/amazon-lambda/common-runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -0,0 +1,4 @@ +--- +name: "AWS Lambda Common" +metadata: + unlisted: true diff --git a/extensions/amazon-lambda/deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaProcessor.java b/extensions/amazon-lambda/deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaProcessor.java index fa3a57f31263b..128109ea22523 100644 --- a/extensions/amazon-lambda/deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaProcessor.java +++ b/extensions/amazon-lambda/deployment/src/main/java/io/quarkus/amazon/lambda/deployment/AmazonLambdaProcessor.java @@ -118,8 +118,16 @@ List discover(CombinedIndexBuildItem combinedIndexBuildIt break; } else if (method.parameters().size() == 2 && !method.parameters().get(0).name().equals(DotName.createSimple(Object.class.getName()))) { - reflectiveHierarchy.produce(new ReflectiveHierarchyBuildItem(method.parameters().get(0))); - reflectiveHierarchy.produce(new ReflectiveHierarchyBuildItem(method.returnType())); + String source = getClass().getSimpleName() + " > " + method.declaringClass() + "[" + method + "]"; + + reflectiveHierarchy.produce(new ReflectiveHierarchyBuildItem.Builder() + .type(method.parameters().get(0)) + .source(source) + .build()); + reflectiveHierarchy.produce(new 
ReflectiveHierarchyBuildItem.Builder() + .type(method.returnType()) + .source(source) + .build()); done = true; break; } diff --git a/extensions/amazon-lambda/maven-archetype/pom.xml b/extensions/amazon-lambda/maven-archetype/pom.xml index 958d7c2f6d6fb..32e7bcd3439bf 100644 --- a/extensions/amazon-lambda/maven-archetype/pom.xml +++ b/extensions/amazon-lambda/maven-archetype/pom.xml @@ -6,7 +6,6 @@ quarkus-amazon-lambda-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 diff --git a/extensions/amazon-lambda/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml b/extensions/amazon-lambda/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml index 6a193f75b4558..adf934726668b 100644 --- a/extensions/amazon-lambda/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml +++ b/extensions/amazon-lambda/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml @@ -9,7 +9,7 @@ to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an diff --git a/extensions/amazon-lambda/maven-archetype/src/main/resources/archetype-resources/pom.xml b/extensions/amazon-lambda/maven-archetype/src/main/resources/archetype-resources/pom.xml index 6d91e08c433d9..d43f3f1d25ef6 100644 --- a/extensions/amazon-lambda/maven-archetype/src/main/resources/archetype-resources/pom.xml +++ b/extensions/amazon-lambda/maven-archetype/src/main/resources/archetype-resources/pom.xml @@ -84,25 +84,9 @@ native - - - - io.quarkus - quarkus-maven-plugin - \${quarkus-plugin.version} - - - - native-image - - - true - - - - - - + + native + diff --git a/extensions/amazon-lambda/pom.xml b/extensions/amazon-lambda/pom.xml index 74a5fdbd938c9..71ccac1880aee 100644 --- a/extensions/amazon-lambda/pom.xml +++ b/extensions/amazon-lambda/pom.xml @@ -5,10 +5,10 @@ 4.0.0 - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml quarkus-amazon-lambda-parent diff --git a/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AmazonLambdaRecorder.java b/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AmazonLambdaRecorder.java index 5374bf6c47943..025873490e433 100644 --- a/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AmazonLambdaRecorder.java +++ b/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AmazonLambdaRecorder.java @@ -12,10 +12,10 @@ import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import com.amazonaws.services.lambda.runtime.RequestStreamHandler; +import com.amazonaws.services.lambda.runtime.events.S3Event; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.ObjectWriter; +import io.quarkus.amazon.lambda.runtime.handlers.S3EventInputReader; import io.quarkus.arc.runtime.BeanContainer; import io.quarkus.runtime.ShutdownContext; import io.quarkus.runtime.annotations.Recorder; @@ -32,8 +32,8 @@ public class AmazonLambdaRecorder { private static Class> handlerClass; private static Class streamHandlerClass; private static BeanContainer 
beanContainer; - private static ObjectReader objectReader; - private static ObjectWriter objectWriter; + private static LambdaInputReader objectReader; + private static LambdaOutputWriter objectWriter; public void setStreamHandlerClass(Class handler, BeanContainer container) { streamHandlerClass = handler; @@ -45,8 +45,12 @@ public void setHandlerClass(Class> handler, BeanC beanContainer = container; ObjectMapper objectMapper = AmazonLambdaMapperRecorder.objectMapper; Method handlerMethod = discoverHandlerMethod(handlerClass); - objectReader = objectMapper.readerFor(handlerMethod.getParameterTypes()[0]); - objectWriter = objectMapper.writerFor(handlerMethod.getReturnType()); + if (handlerMethod.getParameterTypes()[0].equals(S3Event.class)) { + objectReader = new S3EventInputReader(objectMapper); + } else { + objectReader = new JacksonInputReader(objectMapper.readerFor(handlerMethod.getParameterTypes()[0])); + } + objectWriter = new JacksonOutputWriter(objectMapper.writerFor(handlerMethod.getReturnType())); } /** @@ -80,9 +84,12 @@ private Method discoverHandlerMethod(Class> handl } } } - if (method == null) { + if (method == null && methods.length > 0) { method = methods[0]; } + if (method == null) { + throw new RuntimeException("Unable to find a method which handles request on handler class " + handlerClass); + } return method; } @@ -149,12 +156,12 @@ protected Object processRequest(Object input, AmazonLambdaContext context) throw } @Override - protected ObjectReader getInputReader() { + protected LambdaInputReader getInputReader() { return objectReader; } @Override - protected ObjectWriter getOutputWriter() { + protected LambdaOutputWriter getOutputWriter() { return objectWriter; } diff --git a/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/JacksonUtil.java b/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/JacksonUtil.java new file mode 100644 index 0000000000000..ef02659f069db --- /dev/null +++ b/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/JacksonUtil.java @@ -0,0 +1,15 @@ +package io.quarkus.amazon.lambda.runtime.handlers; + +import com.fasterxml.jackson.databind.JsonNode; + +public class JacksonUtil { + public static String getText(String name, JsonNode node) { + JsonNode e = node.get(name); + return e == null ? null : e.asText(); + } + + public static Long getLong(String name, JsonNode node) { + JsonNode e = node.get(name); + return e == null ? 
null : e.asLong(); + } +} diff --git a/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/S3EventInputReader.java b/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/S3EventInputReader.java new file mode 100644 index 0000000000000..602c602259b44 --- /dev/null +++ b/extensions/amazon-lambda/runtime/src/main/java/io/quarkus/amazon/lambda/runtime/handlers/S3EventInputReader.java @@ -0,0 +1,97 @@ +package io.quarkus.amazon.lambda.runtime.handlers; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import com.amazonaws.services.lambda.runtime.events.S3Event; +import com.amazonaws.services.lambda.runtime.events.models.s3.S3EventNotification; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +import io.quarkus.amazon.lambda.runtime.LambdaInputReader; + +public class S3EventInputReader implements LambdaInputReader { + final ObjectMapper mapper; + + public S3EventInputReader(ObjectMapper mapper) { + this.mapper = mapper; + } + + @Override + public S3Event readValue(InputStream is) throws IOException { + JsonNode json = mapper.readTree(is); + JsonNode records = json.get("Records"); + if (records == null || !records.isArray()) { + return new S3Event(Collections.EMPTY_LIST); + } + List list = new ArrayList<>(); + + for (int i = 0; i < records.size(); i++) { + JsonNode record = records.get(i); + String awsRegion = JacksonUtil.getText("awsRegion", record); + String eventName = JacksonUtil.getText("eventName", record); + String eventSource = JacksonUtil.getText("eventSource", record); + String eventTime = JacksonUtil.getText("eventTime", record); + String eventVersion = JacksonUtil.getText("eventVersion", record); + JsonNode params = record.get("requestParameters"); + S3EventNotification.RequestParametersEntity requestParameters = null; + if (params != null) { + requestParameters = new S3EventNotification.RequestParametersEntity( + JacksonUtil.getText("sourceIPAddress", params)); + } + JsonNode elems = record.get("responseElements"); + S3EventNotification.ResponseElementsEntity responseElements = null; + if (elems != null) { + String requestId = JacksonUtil.getText("x-amz-request-id", elems); + String id = JacksonUtil.getText("x-amz-id-2", elems); + responseElements = new S3EventNotification.ResponseElementsEntity(id, requestId); + } + JsonNode userIdentity = record.get("userIdentity"); + S3EventNotification.UserIdentityEntity userId = null; + if (userIdentity != null) { + String principalId = JacksonUtil.getText("principalId", userIdentity); + userId = new S3EventNotification.UserIdentityEntity(principalId); + } + + JsonNode s3 = record.get("s3"); + S3EventNotification.S3Entity s3Entity = null; + if (s3 != null) { + String configurationId = JacksonUtil.getText("configurationId", s3); + String schemaVersion = JacksonUtil.getText("s3SchemaVersion", s3); + JsonNode bucketNode = s3.get("bucket"); + S3EventNotification.S3BucketEntity bucket = null; + if (bucketNode != null) { + String name = JacksonUtil.getText("name", bucketNode); + JsonNode ownerIdentity = bucketNode.get("ownerIdentity"); + S3EventNotification.UserIdentityEntity owner = null; + if (ownerIdentity != null) { + String principalId = JacksonUtil.getText("principalId", ownerIdentity); + owner = new S3EventNotification.UserIdentityEntity(principalId); + } + String arn = JacksonUtil.getText("arn", bucketNode); + bucket = new 
S3EventNotification.S3BucketEntity(name, owner, arn); + } + JsonNode object = s3.get("object"); + S3EventNotification.S3ObjectEntity obj = null; + if (object != null) { + String key = JacksonUtil.getText("key", object); + Long size = JacksonUtil.getLong("size", object); + String eTag = JacksonUtil.getText("eTag", object); + String versionId = JacksonUtil.getText("versionId", object); + String sequencer = JacksonUtil.getText("sequencer", object); + obj = new S3EventNotification.S3ObjectEntity(key, size, eTag, versionId, sequencer); + } + s3Entity = new S3EventNotification.S3Entity(configurationId, bucket, obj, schemaVersion); + } + S3EventNotification.S3EventNotificationRecord r = new S3EventNotification.S3EventNotificationRecord(awsRegion, + eventName, eventSource, eventTime, + eventVersion, requestParameters, + responseElements, s3Entity, userId); + list.add(r); + } + return new S3Event(list); + } +} diff --git a/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml index ee534812a3b46..caa4652ad141a 100644 --- a/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/amazon-lambda/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -1,11 +1,15 @@ ---- name: "AWS Lambda" metadata: keywords: - - "lambda" - - "aws" - - "amazon" + - "lambda" + - "aws" + - "amazon" categories: - - "cloud" + - "cloud" guide: "https://quarkus.io/guides/amazon-lambda" status: "preview" + codestart: + name: "amazon-lambda" + kind: "singleton-example" + languages: "java" + artifact: "io.quarkus:quarkus-descriptor-json" diff --git a/extensions/amazon-lambda/runtime/src/test/java/io/quarkus/amazon/lambda/runtime/S3EventTest.java b/extensions/amazon-lambda/runtime/src/test/java/io/quarkus/amazon/lambda/runtime/S3EventTest.java new file mode 100644 index 0000000000000..400759f43e959 --- /dev/null +++ b/extensions/amazon-lambda/runtime/src/test/java/io/quarkus/amazon/lambda/runtime/S3EventTest.java @@ -0,0 +1,85 @@ +package io.quarkus.amazon.lambda.runtime; + +import java.io.ByteArrayInputStream; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import com.amazonaws.services.lambda.runtime.events.S3Event; +import com.amazonaws.services.lambda.runtime.events.models.s3.S3EventNotification; +import com.fasterxml.jackson.databind.ObjectMapper; + +import io.quarkus.amazon.lambda.runtime.handlers.S3EventInputReader; + +public class S3EventTest { + + static String json = "{\n" + + " \"Records\":[\n" + + " {\n" + + " \"eventVersion\":\"2.0\",\n" + + " \"eventSource\":\"aws:s3\",\n" + + " \"awsRegion\":\"us-west-2\",\n" + + " \"eventTime\":\"1970-01-01T00:00:00.000Z\",\n" + + " \"eventName\":\"ObjectCreated:Put\",\n" + + " \"userIdentity\":{\n" + + " \"principalId\":\"AIDAJDPLRKLG7UEXAMPLE\"\n" + + " },\n" + + " \"requestParameters\":{\n" + + " \"sourceIPAddress\":\"127.0.0.1\"\n" + + " },\n" + + " \"responseElements\":{\n" + + " \"x-amz-request-id\":\"C3D13FE58DE4C810\",\n" + + " \"x-amz-id-2\":\"FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD\"\n" + + " },\n" + + " \"s3\":{\n" + + " \"s3SchemaVersion\":\"1.0\",\n" + + " \"configurationId\":\"testConfigRule\",\n" + + " \"bucket\":{\n" + + " \"name\":\"sourcebucket\",\n" + + " \"ownerIdentity\":{\n" + + " \"principalId\":\"A3NL1KOZZKExample\"\n" + + " },\n" + + " \"arn\":\"arn:aws:s3:::sourcebucket\"\n" + + " },\n" + + " \"object\":{\n" + + " 
\"key\":\"HappyFace.jpg\",\n" + + " \"size\":1024,\n" + + " \"eTag\":\"d41d8cd98f00b204e9800998ecf8427e\",\n" + + " \"versionId\":\"096fKKXTRTtl3on89fVO.nfljtsv6qko\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + "}"; + + @Test + public void testParse() throws Exception { + ObjectMapper mapper = new ObjectMapper(); + S3EventInputReader reader = new S3EventInputReader(mapper); + ByteArrayInputStream is = new ByteArrayInputStream(json.getBytes()); + S3Event event = reader.readValue(is); + Assertions.assertEquals(1, event.getRecords().size()); + S3EventNotification.S3EventNotificationRecord record = event.getRecords().get(0); + Assertions.assertEquals("2.0", record.getEventVersion()); + Assertions.assertEquals("aws:s3", record.getEventSource()); + Assertions.assertEquals("us-west-2", record.getAwsRegion()); + Assertions.assertEquals("1970-01-01T00:00:00.000Z", record.getEventTime().toString()); + Assertions.assertEquals("ObjectCreated:Put", record.getEventName()); + Assertions.assertEquals("AIDAJDPLRKLG7UEXAMPLE", record.getUserIdentity().getPrincipalId()); + Assertions.assertEquals("127.0.0.1", record.getRequestParameters().getSourceIPAddress()); + Assertions.assertEquals("C3D13FE58DE4C810", record.getResponseElements().getxAmzRequestId()); + Assertions.assertEquals("FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD", + record.getResponseElements().getxAmzId2()); + + Assertions.assertEquals("1.0", record.getS3().getS3SchemaVersion()); + Assertions.assertEquals("testConfigRule", record.getS3().getConfigurationId()); + Assertions.assertEquals("sourcebucket", record.getS3().getBucket().getName()); + Assertions.assertEquals("arn:aws:s3:::sourcebucket", record.getS3().getBucket().getArn()); + Assertions.assertEquals("A3NL1KOZZKExample", record.getS3().getBucket().getOwnerIdentity().getPrincipalId()); + Assertions.assertEquals("HappyFace.jpg", record.getS3().getObject().getKey()); + Assertions.assertEquals(1024, record.getS3().getObject().getSizeAsLong()); + Assertions.assertEquals("d41d8cd98f00b204e9800998ecf8427e", record.getS3().getObject().geteTag()); + Assertions.assertEquals("096fKKXTRTtl3on89fVO.nfljtsv6qko", record.getS3().getObject().getVersionId()); + + } +} diff --git a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AbstractAmazonServiceProcessor.java b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AbstractAmazonServiceProcessor.java index 46b7ae6cf6982..7928a7617e82e 100644 --- a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AbstractAmazonServiceProcessor.java +++ b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AbstractAmazonServiceProcessor.java @@ -7,8 +7,10 @@ import org.jboss.jandex.DotName; import org.jboss.jandex.Type; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.common.runtime.AwsConfig; import io.quarkus.amazon.common.runtime.NettyHttpClientConfig; import io.quarkus.amazon.common.runtime.SdkBuildTimeConfig; @@ -72,58 +74,103 @@ protected void setupExtension(BeanRegistrationPhaseBuildItem beanRegistrationPha } } - public 
void createTransportBuilders(List amazonClients, - AmazonClientTransportRecorder recorder, + protected void createApacheSyncTransportBuilder(List amazonClients, + AmazonClientApacheTransportRecorder recorder, SyncHttpClientBuildTimeConfig buildSyncConfig, RuntimeValue syncConfig, - RuntimeValue asyncConfig, - BuildProducer clientTransports) { + BuildProducer clientSyncTransports) { Optional matchingClientBuildItem = amazonClients.stream() .filter(c -> c.getAwsClientName().equals(configName())) .findAny(); matchingClientBuildItem.ifPresent(client -> { - RuntimeValue syncTransport = null; - RuntimeValue asyncTransport = null; - - if (client.getSyncClassName().isPresent()) { - syncTransport = recorder.configureSync(configName(), buildSyncConfig, - syncConfig); + if (!client.getSyncClassName().isPresent()) { + return; } + if (buildSyncConfig.type != SyncHttpClientBuildTimeConfig.SyncClientType.APACHE) { + return; + } + + clientSyncTransports.produce( + new AmazonClientSyncTransportBuildItem( + client.getAwsClientName(), + client.getSyncClassName().get(), + recorder.configureSync(configName(), syncConfig))); + }); + } + + protected void createUrlConnectionSyncTransportBuilder(List amazonClients, + AmazonClientUrlConnectionTransportRecorder recorder, + SyncHttpClientBuildTimeConfig buildSyncConfig, + RuntimeValue syncConfig, + BuildProducer clientSyncTransports) { + + Optional matchingClientBuildItem = amazonClients.stream() + .filter(c -> c.getAwsClientName().equals(configName())) + .findAny(); - if (client.getAsyncClassName().isPresent()) { - asyncTransport = recorder.configureAsync(configName(), asyncConfig); + matchingClientBuildItem.ifPresent(client -> { + if (!client.getSyncClassName().isPresent()) { + return; + } + if (buildSyncConfig.type != SyncHttpClientBuildTimeConfig.SyncClientType.URL) { + return; } - clientTransports.produce( - new AmazonClientTransportsBuildItem( - client.getSyncClassName(), client.getAsyncClassName(), - syncTransport, - asyncTransport, - client.getAwsClientName())); + clientSyncTransports.produce( + new AmazonClientSyncTransportBuildItem( + client.getAwsClientName(), + client.getSyncClassName().get(), + recorder.configureSync(configName(), syncConfig))); }); + } + protected void createNettyAsyncTransportBuilder(List amazonClients, + AmazonClientNettyTransportRecorder recorder, + RuntimeValue asyncConfig, + BuildProducer clientAsyncTransports) { + + Optional matchingClientBuildItem = amazonClients.stream() + .filter(c -> c.getAwsClientName().equals(configName())) + .findAny(); + + matchingClientBuildItem.ifPresent(client -> { + if (!client.getAsyncClassName().isPresent()) { + return; + } + + clientAsyncTransports.produce( + new AmazonClientAsyncTransportBuildItem( + client.getAwsClientName(), + client.getAsyncClassName().get(), + recorder.configureAsync(configName(), asyncConfig))); + }); } - protected void createClientBuilders(List clients, + protected void createClientBuilders(List syncClientBuilders, + List asyncClientBuilders, BuildProducer builderProducer, Function, RuntimeValue> syncFunc, Function, RuntimeValue> asyncFunc) { - - for (AmazonClientTransportsBuildItem client : clients) { - if (configName().equals(client.getAwsClientName())) { - RuntimeValue syncBuilder = null; - RuntimeValue asyncBuilder = null; - if (client.getSyncClassName().isPresent()) { - syncBuilder = syncFunc.apply(client.getSyncTransport()); - } - if (client.getAsyncClassName().isPresent()) { - asyncBuilder = asyncFunc.apply(client.getAsyncTransport()); - } - builderProducer.produce(new 
AmazonClientBuilderBuildItem(client.getAwsClientName(), syncBuilder, asyncBuilder)); - } + String configName = configName(); + + Optional> syncClientBuilder = syncClientBuilders.stream() + .filter(c -> configName.equals(c.getAwsClientName())) + .map(c -> c.getClientBuilder()) + .findFirst(); + Optional> asyncClientBuilder = asyncClientBuilders.stream() + .filter(c -> configName.equals(c.getAwsClientName())) + .map(c -> c.getClientBuilder()) + .findFirst(); + + if (!syncClientBuilder.isPresent() && !asyncClientBuilder.isPresent()) { + return; } + + builderProducer.produce(new AmazonClientBuilderBuildItem(configName, + syncClientBuilder.isPresent() ? syncFunc.apply(syncClientBuilder.get()) : null, + asyncClientBuilder.isPresent() ? asyncFunc.apply(asyncClientBuilder.get()) : null)); } protected void buildClients(List configuredClients, diff --git a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientAsyncTransportBuildItem.java b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientAsyncTransportBuildItem.java new file mode 100644 index 0000000000000..7c44981ab8395 --- /dev/null +++ b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientAsyncTransportBuildItem.java @@ -0,0 +1,34 @@ +package io.quarkus.amazon.common.deployment; + +import org.jboss.jandex.DotName; + +import io.quarkus.builder.item.MultiBuildItem; +import io.quarkus.runtime.RuntimeValue; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; + +public final class AmazonClientAsyncTransportBuildItem extends MultiBuildItem { + + private final String awsClientName; + private final DotName className; + private final RuntimeValue clientBuilder; + + public AmazonClientAsyncTransportBuildItem(String awsClientName, + DotName className, + RuntimeValue clientBuilder) { + this.awsClientName = awsClientName; + this.className = className; + this.clientBuilder = clientBuilder; + } + + public String getAwsClientName() { + return awsClientName; + } + + public DotName getClassName() { + return className; + } + + public RuntimeValue getClientBuilder() { + return clientBuilder; + } +} diff --git a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientSyncTransportBuildItem.java b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientSyncTransportBuildItem.java new file mode 100644 index 0000000000000..6501d3f62543e --- /dev/null +++ b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientSyncTransportBuildItem.java @@ -0,0 +1,34 @@ +package io.quarkus.amazon.common.deployment; + +import org.jboss.jandex.DotName; + +import io.quarkus.builder.item.MultiBuildItem; +import io.quarkus.runtime.RuntimeValue; +import software.amazon.awssdk.http.SdkHttpClient; + +public final class AmazonClientSyncTransportBuildItem extends MultiBuildItem { + + private final String awsClientName; + private final DotName className; + private final RuntimeValue clientBuilder; + + public AmazonClientSyncTransportBuildItem(String awsClientName, + DotName className, + RuntimeValue clientBuilder) { + this.awsClientName = awsClientName; + this.className = className; + this.clientBuilder = clientBuilder; + } + + public String getAwsClientName() { + return awsClientName; + } + + public DotName getClassName() { + return className; + } + + public RuntimeValue 
getClientBuilder() { + return clientBuilder; + } +} diff --git a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientTransportsBuildItem.java b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientTransportsBuildItem.java deleted file mode 100644 index 94b00bd12dc66..0000000000000 --- a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonClientTransportsBuildItem.java +++ /dev/null @@ -1,51 +0,0 @@ -package io.quarkus.amazon.common.deployment; - -import java.util.Optional; - -import org.jboss.jandex.DotName; - -import io.quarkus.builder.item.MultiBuildItem; -import io.quarkus.runtime.RuntimeValue; -import software.amazon.awssdk.http.SdkHttpClient; -import software.amazon.awssdk.http.SdkHttpClient.Builder; -import software.amazon.awssdk.http.async.SdkAsyncHttpClient; - -public final class AmazonClientTransportsBuildItem extends MultiBuildItem { - private final Optional syncClassName; - private final Optional asyncClassName; - private final RuntimeValue syncTransport; - private final RuntimeValue asyncTransport; - - private final String awsClientName; - - public AmazonClientTransportsBuildItem(Optional syncClassName, Optional asyncClassName, - RuntimeValue syncTransport, - RuntimeValue asyncTransport, - String awsClientName) { - this.syncClassName = syncClassName; - this.asyncClassName = asyncClassName; - this.syncTransport = syncTransport; - this.asyncTransport = asyncTransport; - this.awsClientName = awsClientName; - } - - public Optional getSyncClassName() { - return syncClassName; - } - - public Optional getAsyncClassName() { - return asyncClassName; - } - - public RuntimeValue getSyncTransport() { - return syncTransport; - } - - public RuntimeValue getAsyncTransport() { - return asyncTransport; - } - - public String getAwsClientName() { - return awsClientName; - } -} diff --git a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonHttpClients.java b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonHttpClients.java new file mode 100644 index 0000000000000..7642ba22804db --- /dev/null +++ b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonHttpClients.java @@ -0,0 +1,46 @@ +package io.quarkus.amazon.common.deployment; + +import java.util.function.BooleanSupplier; + +public class AmazonHttpClients { + + public static final String APACHE_HTTP_SERVICE = "software.amazon.awssdk.http.apache.ApacheSdkHttpService"; + public static final String NETTY_HTTP_SERVICE = "software.amazon.awssdk.http.nio.netty.NettySdkAsyncHttpService"; + public static final String URL_CONNECTION_HTTP_SERVICE = "software.amazon.awssdk.http.urlconnection.UrlConnectionSdkHttpService"; + + public static class IsAmazonApacheHttpServicePresent implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + try { + Class.forName(APACHE_HTTP_SERVICE); + return true; + } catch (Exception e) { + return false; + } + } + }; + + public static class IsAmazonNettyHttpServicePresent implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + try { + Class.forName(NETTY_HTTP_SERVICE); + return true; + } catch (Exception e) { + return false; + } + } + }; + + public static class IsAmazonUrlConnectionHttpServicePresent implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + try { + 
Class.forName(URL_CONNECTION_HTTP_SERVICE); + return true; + } catch (Exception e) { + return false; + } + } + }; +} diff --git a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonServicesClientsProcessor.java b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonServicesClientsProcessor.java index 92253de931b7a..204f2c50f0a9f 100644 --- a/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonServicesClientsProcessor.java +++ b/extensions/amazon-services/common/deployment/src/main/java/io/quarkus/amazon/common/deployment/AmazonServicesClientsProcessor.java @@ -27,10 +27,7 @@ public class AmazonServicesClientsProcessor { public static final String AWS_SDK_APPLICATION_ARCHIVE_MARKERS = "software/amazon/awssdk"; - - private static final String APACHE_HTTP_SERVICE = "software.amazon.awssdk.http.apache.ApacheSdkHttpService"; - private static final String NETTY_HTTP_SERVICE = "software.amazon.awssdk.http.nio.netty.NettySdkAsyncHttpService"; - private static final String URL_HTTP_SERVICE = "software.amazon.awssdk.http.urlconnection.UrlConnectionSdkHttpService"; + public static final String AWS_SDK_XRAY_ARCHIVE_MARKER = "com/amazonaws/xray"; private static final DotName EXECUTION_INTERCEPTOR_NAME = DotName.createSimple(ExecutionInterceptor.class.getName()); @@ -41,8 +38,9 @@ void globalInterceptors(BuildProducer pro } @BuildStep - AdditionalApplicationArchiveMarkerBuildItem awsAppArchiveMarkers() { - return new AdditionalApplicationArchiveMarkerBuildItem(AWS_SDK_APPLICATION_ARCHIVE_MARKERS); + void awsAppArchiveMarkers(BuildProducer archiveMarker) { + archiveMarker.produce(new AdditionalApplicationArchiveMarkerBuildItem(AWS_SDK_APPLICATION_ARCHIVE_MARKERS)); + archiveMarker.produce(new AdditionalApplicationArchiveMarkerBuildItem(AWS_SDK_XRAY_ARCHIVE_MARKER)); } @BuildStep @@ -67,8 +65,9 @@ void setup(CombinedIndexBuildItem combinedIndexBuildItem, for (AmazonClientBuildItem client : amazonClients) { SdkBuildTimeConfig clientSdkConfig = client.getBuildTimeSdkConfig(); if (clientSdkConfig != null) { - clientSdkConfig.interceptors.orElse(Collections.emptyList()).forEach(interceptorClass -> { - if (!knownInterceptorImpls.contains(interceptorClass.getName())) { + clientSdkConfig.interceptors.orElse(Collections.emptyList()).forEach(interceptorClassName -> { + interceptorClassName = interceptorClassName.trim(); + if (!knownInterceptorImpls.contains(interceptorClassName)) { throw new ConfigurationError( String.format( "quarkus.%s.interceptors (%s) - must list only existing implementations of software.amazon.awssdk.core.interceptor.ExecutionInterceptor", @@ -92,39 +91,75 @@ void setup(CombinedIndexBuildItem combinedIndexBuildItem, final Predicate isSyncApache = client -> client .getBuildTimeSyncConfig().type == SyncClientType.APACHE; - //Register only clients that are used + // Register what's needed depending on the clients in the classpath and the configuration. + // We use the configuration to guide us but if we don't have any clients configured, + // we still register what's needed depending on what is in the classpath. 
+ boolean isSyncApacheInClasspath = isInClasspath(AmazonHttpClients.APACHE_HTTP_SERVICE); + boolean isSyncUrlConnectionInClasspath = isInClasspath(AmazonHttpClients.URL_CONNECTION_HTTP_SERVICE); + boolean isAsyncInClasspath = isInClasspath(AmazonHttpClients.NETTY_HTTP_SERVICE); + + // Check that the clients required by the configuration are available if (syncTransportNeeded) { if (amazonClients.stream().filter(isSyncApache).findAny().isPresent()) { - checkClasspath(APACHE_HTTP_SERVICE, "apache-client"); - //Register Apache client as sync client - proxyDefinition - .produce(new NativeImageProxyDefinitionBuildItem("org.apache.http.conn.HttpClientConnectionManager", - "org.apache.http.pool.ConnPoolControl", - "software.amazon.awssdk.http.apache.internal.conn.Wrapped")); - - serviceProvider.produce( - new ServiceProviderBuildItem(SdkHttpService.class.getName(), APACHE_HTTP_SERVICE)); + if (isSyncApacheInClasspath) { + registerSyncApacheClient(proxyDefinition, serviceProvider); + } else { + throw missingDependencyException("apache-client"); + } + } else if (isSyncUrlConnectionInClasspath) { + registerSyncUrlConnectionClient(serviceProvider); } else { - checkClasspath(URL_HTTP_SERVICE, "url-connection-client"); - serviceProvider.produce(new ServiceProviderBuildItem(SdkHttpService.class.getName(), URL_HTTP_SERVICE)); + throw missingDependencyException("url-connection-client"); + } + } else { + // even if we don't register any clients via configuration, we still register the clients + // but this time only based on the classpath. + if (isSyncApacheInClasspath) { + registerSyncApacheClient(proxyDefinition, serviceProvider); + } else if (isSyncUrlConnectionInClasspath) { + registerSyncUrlConnectionClient(serviceProvider); } } - if (asyncTransportNeeded) { - checkClasspath(NETTY_HTTP_SERVICE, "netty-nio-client"); - //Register netty as async client - serviceProvider.produce( - new ServiceProviderBuildItem(SdkAsyncHttpService.class.getName(), - NETTY_HTTP_SERVICE)); + if (isAsyncInClasspath) { + registerAsyncNettyClient(serviceProvider); + } else if (asyncTransportNeeded) { + throw missingDependencyException("netty-nio-client"); } } - private void checkClasspath(String className, String dependencyName) { + private static void registerSyncApacheClient(BuildProducer proxyDefinition, + BuildProducer serviceProvider) { + proxyDefinition + .produce(new NativeImageProxyDefinitionBuildItem("org.apache.http.conn.HttpClientConnectionManager", + "org.apache.http.pool.ConnPoolControl", + "software.amazon.awssdk.http.apache.internal.conn.Wrapped")); + + serviceProvider.produce( + new ServiceProviderBuildItem(SdkHttpService.class.getName(), AmazonHttpClients.APACHE_HTTP_SERVICE)); + } + + private static void registerSyncUrlConnectionClient(BuildProducer serviceProvider) { + serviceProvider.produce( + new ServiceProviderBuildItem(SdkHttpService.class.getName(), AmazonHttpClients.URL_CONNECTION_HTTP_SERVICE)); + } + + private static void registerAsyncNettyClient(BuildProducer serviceProvider) { + serviceProvider.produce( + new ServiceProviderBuildItem(SdkAsyncHttpService.class.getName(), + AmazonHttpClients.NETTY_HTTP_SERVICE)); + } + + private static boolean isInClasspath(String className) { try { Class.forName(className, true, Thread.currentThread().getContextClassLoader()); + return true; } catch (ClassNotFoundException e) { - throw new DeploymentException( - "Missing 'software.amazon.awssdk:" + dependencyName + "' dependency on the classpath"); + return false; } } + + private DeploymentException 
missingDependencyException(String dependencyName) { + return new DeploymentException("Missing 'software.amazon.awssdk:" + dependencyName + "' dependency on the classpath"); + } } diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AbstractAmazonClientTransportRecorder.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AbstractAmazonClientTransportRecorder.java new file mode 100644 index 0000000000000..aa2682e0a9c1b --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AbstractAmazonClientTransportRecorder.java @@ -0,0 +1,115 @@ +package io.quarkus.amazon.common.runtime; + +import java.net.URI; +import java.util.Optional; + +import io.quarkus.runtime.RuntimeValue; +import software.amazon.awssdk.http.SdkHttpClient; +import software.amazon.awssdk.http.TlsKeyManagersProvider; +import software.amazon.awssdk.http.TlsTrustManagersProvider; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; +import software.amazon.awssdk.utils.StringUtils; + +public abstract class AbstractAmazonClientTransportRecorder { + + @SuppressWarnings("rawtypes") + public RuntimeValue configureSync(String clientName, + RuntimeValue syncConfigRuntime) { + throw new IllegalStateException("Configuring a sync client is not supported by " + this.getClass().getName()); + } + + @SuppressWarnings("rawtypes") + public RuntimeValue configureAsync(String clientName, + RuntimeValue asyncConfigRuntime) { + throw new IllegalStateException("Configuring an async client is not supported by " + this.getClass().getName()); + } + + protected Optional getTlsKeyManagersProvider(TlsKeyManagersProviderConfig config) { + if (config.fileStore != null && config.fileStore.path.isPresent() && config.fileStore.type.isPresent()) { + return Optional.of(config.type.create(config)); + } + return Optional.empty(); + } + + protected Optional getTlsTrustManagersProvider(TlsTrustManagersProviderConfig config) { + if (config.fileStore != null && config.fileStore.path.isPresent() && config.fileStore.type.isPresent()) { + return Optional.of(config.type.create(config)); + } + return Optional.empty(); + } + + protected void validateProxyEndpoint(String extension, URI endpoint, String clientType) { + if (StringUtils.isBlank(endpoint.getScheme())) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - scheme must be specified", + extension, clientType, endpoint.toString())); + } + if (StringUtils.isBlank(endpoint.getHost())) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - host must be specified", + extension, clientType, endpoint.toString())); + } + if (StringUtils.isNotBlank(endpoint.getUserInfo())) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - user info is not supported.", + extension, clientType, endpoint.toString())); + } + if (StringUtils.isNotBlank(endpoint.getPath())) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - path is not supported.", + extension, clientType, endpoint.toString())); + } + if (StringUtils.isNotBlank(endpoint.getQuery())) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - query is not supported.", + extension, clientType, endpoint.toString())); + } + if (StringUtils.isNotBlank(endpoint.getFragment())) { + throw new 
RuntimeConfigurationError( + String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - fragment is not supported.", + extension, clientType, endpoint.toString())); + } + } + + protected void validateTlsKeyManagersProvider(String extension, TlsKeyManagersProviderConfig config, String clientType) { + if (config != null && config.type == TlsKeyManagersProviderType.FILE_STORE) { + validateFileStore(extension, clientType, "key", config.fileStore); + } + } + + protected void validateTlsTrustManagersProvider(String extension, TlsTrustManagersProviderConfig config, + String clientType) { + if (config != null && config.type == TlsTrustManagersProviderType.FILE_STORE) { + validateFileStore(extension, clientType, "trust", config.fileStore); + } + } + + protected void validateFileStore(String extension, String clientType, String storeType, + FileStoreTlsManagersProviderConfig fileStore) { + if (fileStore == null) { + throw new RuntimeConfigurationError( + String.format( + "quarkus.%s.%s-client.tls-%s-managers-provider.file-store must be specified if 'FILE_STORE' provider type is used", + extension, clientType, storeType)); + } else { + if (!fileStore.path.isPresent()) { + throw new RuntimeConfigurationError( + String.format( + "quarkus.%s.%s-client.tls-%s-managers-provider.file-store.path should not be empty if 'FILE_STORE' provider is used.", + extension, clientType, storeType)); + } + if (!fileStore.type.isPresent()) { + throw new RuntimeConfigurationError( + String.format( + "quarkus.%s.%s-client.tls-%s-managers-provider.file-store.type should not be empty if 'FILE_STORE' provider is used.", + extension, clientType, storeType)); + } + if (!fileStore.password.isPresent()) { + throw new RuntimeConfigurationError( + String.format( + "quarkus.%s.%s-client.tls-%s-managers-provider.file-store.password should not be empty if 'FILE_STORE' provider is used.", + extension, clientType, storeType)); + } + } + } +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientApacheTransportRecorder.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientApacheTransportRecorder.java new file mode 100644 index 0000000000000..cc1a0c7fffe7f --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientApacheTransportRecorder.java @@ -0,0 +1,59 @@ +package io.quarkus.amazon.common.runtime; + +import io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.annotations.Recorder; +import software.amazon.awssdk.http.SdkHttpClient; +import software.amazon.awssdk.http.apache.ApacheHttpClient; +import software.amazon.awssdk.http.apache.ApacheHttpClient.Builder; +import software.amazon.awssdk.http.apache.ProxyConfiguration; + +@Recorder +public class AmazonClientApacheTransportRecorder extends AbstractAmazonClientTransportRecorder { + + @SuppressWarnings("rawtypes") + @Override + public RuntimeValue configureSync(String clientName, + RuntimeValue syncConfigRuntime) { + SyncHttpClientConfig syncConfig = syncConfigRuntime.getValue(); + validateTlsKeyManagersProvider(clientName, syncConfig.tlsKeyManagersProvider, "sync"); + validateTlsTrustManagersProvider(clientName, syncConfig.tlsTrustManagersProvider, "sync"); + Builder builder = ApacheHttpClient.builder(); + validateApacheClientConfig(clientName, syncConfig); + + builder.connectionTimeout(syncConfig.connectionTimeout); + builder.connectionAcquisitionTimeout(syncConfig.apache.connectionAcquisitionTimeout);
+ builder.connectionMaxIdleTime(syncConfig.apache.connectionMaxIdleTime); + syncConfig.apache.connectionTimeToLive.ifPresent(builder::connectionTimeToLive); + builder.expectContinueEnabled(syncConfig.apache.expectContinueEnabled); + builder.maxConnections(syncConfig.apache.maxConnections); + builder.socketTimeout(syncConfig.socketTimeout); + builder.useIdleConnectionReaper(syncConfig.apache.useIdleConnectionReaper); + + if (syncConfig.apache.proxy.enabled && syncConfig.apache.proxy.endpoint.isPresent()) { + ProxyConfiguration.Builder proxyBuilder = ProxyConfiguration.builder() + .endpoint(syncConfig.apache.proxy.endpoint.get()); + syncConfig.apache.proxy.username.ifPresent(proxyBuilder::username); + syncConfig.apache.proxy.password.ifPresent(proxyBuilder::password); + syncConfig.apache.proxy.nonProxyHosts.ifPresent(c -> c.forEach(proxyBuilder::addNonProxyHost)); + syncConfig.apache.proxy.ntlmDomain.ifPresent(proxyBuilder::ntlmDomain); + syncConfig.apache.proxy.ntlmWorkstation.ifPresent(proxyBuilder::ntlmWorkstation); + syncConfig.apache.proxy.preemptiveBasicAuthenticationEnabled + .ifPresent(proxyBuilder::preemptiveBasicAuthenticationEnabled); + + builder.proxyConfiguration(proxyBuilder.build()); + } + getTlsKeyManagersProvider(syncConfig.tlsKeyManagersProvider).ifPresent(builder::tlsKeyManagersProvider); + getTlsTrustManagersProvider(syncConfig.tlsTrustManagersProvider).ifPresent(builder::tlsTrustManagersProvider); + return new RuntimeValue<>(builder); + } + + private void validateApacheClientConfig(String extension, SyncHttpClientConfig config) { + if (config.apache.maxConnections <= 0) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.sync-client.max-connections may not be negative or zero.", extension)); + } + if (config.apache.proxy.enabled) { + config.apache.proxy.endpoint.ifPresent(uri -> validateProxyEndpoint(extension, uri, "sync")); + } + } +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientNettyTransportRecorder.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientNettyTransportRecorder.java new file mode 100644 index 0000000000000..7cd4691b7e2a1 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientNettyTransportRecorder.java @@ -0,0 +1,108 @@ +package io.quarkus.amazon.common.runtime; + +import java.util.Collections; +import java.util.HashSet; + +import io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.annotations.Recorder; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; +import software.amazon.awssdk.http.nio.netty.Http2Configuration; +import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient; +import software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup; +import software.amazon.awssdk.utils.ThreadFactoryBuilder; + +@Recorder +public class AmazonClientNettyTransportRecorder extends AbstractAmazonClientTransportRecorder { + + @SuppressWarnings("rawtypes") + @Override + public RuntimeValue configureAsync(String clientName, + RuntimeValue asyncConfigRuntime) { + NettyNioAsyncHttpClient.Builder builder = NettyNioAsyncHttpClient.builder(); + NettyHttpClientConfig asyncConfig = asyncConfigRuntime.getValue(); + validateNettyClientConfig(clientName, asyncConfig); + + builder.connectionAcquisitionTimeout(asyncConfig.connectionAcquisitionTimeout); + builder.connectionMaxIdleTime(asyncConfig.connectionMaxIdleTime); + 
builder.connectionTimeout(asyncConfig.connectionTimeout); + asyncConfig.connectionTimeToLive.ifPresent(builder::connectionTimeToLive); + builder.maxConcurrency(asyncConfig.maxConcurrency); + builder.maxPendingConnectionAcquires(asyncConfig.maxPendingConnectionAcquires); + builder.protocol(asyncConfig.protocol); + builder.readTimeout(asyncConfig.readTimeout); + builder.writeTimeout(asyncConfig.writeTimeout); + asyncConfig.sslProvider.ifPresent(builder::sslProvider); + builder.useIdleConnectionReaper(asyncConfig.useIdleConnectionReaper); + + if (asyncConfig.http2.initialWindowSize.isPresent() || asyncConfig.http2.maxStreams.isPresent()) { + Http2Configuration.Builder http2Builder = Http2Configuration.builder(); + asyncConfig.http2.initialWindowSize.ifPresent(http2Builder::initialWindowSize); + asyncConfig.http2.maxStreams.ifPresent(http2Builder::maxStreams); + asyncConfig.http2.healthCheckPingPeriod.ifPresent(http2Builder::healthCheckPingPeriod); + builder.http2Configuration(http2Builder.build()); + } + + if (asyncConfig.proxy.enabled && asyncConfig.proxy.endpoint.isPresent()) { + software.amazon.awssdk.http.nio.netty.ProxyConfiguration.Builder proxyBuilder = software.amazon.awssdk.http.nio.netty.ProxyConfiguration + .builder().scheme(asyncConfig.proxy.endpoint.get().getScheme()) + .host(asyncConfig.proxy.endpoint.get().getHost()) + .nonProxyHosts(new HashSet<>(asyncConfig.proxy.nonProxyHosts.orElse(Collections.emptyList()))); + + if (asyncConfig.proxy.endpoint.get().getPort() != -1) { + proxyBuilder.port(asyncConfig.proxy.endpoint.get().getPort()); + } + builder.proxyConfiguration(proxyBuilder.build()); + } + + getTlsKeyManagersProvider(asyncConfig.tlsKeyManagersProvider).ifPresent(builder::tlsKeyManagersProvider); + getTlsTrustManagersProvider(asyncConfig.tlsTrustManagersProvider).ifPresent(builder::tlsTrustManagersProvider); + + if (asyncConfig.eventLoop.override) { + SdkEventLoopGroup.Builder eventLoopBuilder = SdkEventLoopGroup.builder(); + asyncConfig.eventLoop.numberOfThreads.ifPresent(eventLoopBuilder::numberOfThreads); + if (asyncConfig.eventLoop.threadNamePrefix.isPresent()) { + eventLoopBuilder.threadFactory( + new ThreadFactoryBuilder().threadNamePrefix(asyncConfig.eventLoop.threadNamePrefix.get()).build()); + } + builder.eventLoopGroupBuilder(eventLoopBuilder); + } + + return new RuntimeValue<>(builder); + } + + private void validateNettyClientConfig(String extension, NettyHttpClientConfig config) { + if (config.maxConcurrency <= 0) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.async-client.max-concurrency may not be negative or zero.", extension)); + } + + if (config.http2.maxStreams.isPresent() && config.http2.maxStreams.get() <= 0) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.async-client.http2.max-streams may not be negative.", extension)); + } + + if (config.http2.initialWindowSize.isPresent() && config.http2.initialWindowSize.getAsInt() <= 0) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.async-client.http2.initial-window-size may not be negative.", extension)); + } + + if (config.maxPendingConnectionAcquires <= 0) { + throw new RuntimeConfigurationError( + String.format("quarkus.%s.async-client.max-pending-connection-acquires may not be negative or zero.", + extension)); + } + if (config.eventLoop.override) { + if (config.eventLoop.numberOfThreads.isPresent() && config.eventLoop.numberOfThreads.getAsInt() <= 0) { + throw new RuntimeConfigurationError( + 
String.format("quarkus.%s.async-client.event-loop.number-of-threads may not be negative or zero.", + extension)); + } + } + if (config.proxy.enabled) { + config.proxy.endpoint.ifPresent(uri -> validateProxyEndpoint(extension, uri, "async")); + } + + validateTlsKeyManagersProvider(extension, config.tlsKeyManagersProvider, "async"); + validateTlsTrustManagersProvider(extension, config.tlsTrustManagersProvider, "async"); + } +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientRecorder.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientRecorder.java index c6b381aca6904..9e225a4551b56 100644 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientRecorder.java +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientRecorder.java @@ -72,17 +72,19 @@ public void initSdkClient(SdkClientBuilder builder, String extension, SdkConfig config.apiCallAttemptTimeout.ifPresent(overrides::apiCallAttemptTimeout); buildConfig.interceptors.orElse(Collections.emptyList()).stream() + .map(String::trim) .map(this::createInterceptor) .filter(Objects::nonNull) .forEach(overrides::addExecutionInterceptor); builder.overrideConfiguration(overrides.build()); } - private ExecutionInterceptor createInterceptor(Class interceptorClass) { + private ExecutionInterceptor createInterceptor(String interceptorClassName) { try { - return (ExecutionInterceptor) Class.forName(interceptorClass.getName()).newInstance(); + return (ExecutionInterceptor) Class + .forName(interceptorClassName, false, Thread.currentThread().getContextClassLoader()).newInstance(); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) { - LOG.error("Unable to create interceptor", e); + LOG.error("Unable to create interceptor " + interceptorClassName, e); return null; } } diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientTransportRecorder.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientTransportRecorder.java deleted file mode 100644 index 91f64f34be6ab..0000000000000 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientTransportRecorder.java +++ /dev/null @@ -1,243 +0,0 @@ -package io.quarkus.amazon.common.runtime; - -import java.net.URI; -import java.util.Collections; -import java.util.HashSet; -import java.util.Optional; - -import io.quarkus.amazon.common.runtime.SyncHttpClientBuildTimeConfig.SyncClientType; -import io.quarkus.runtime.RuntimeValue; -import io.quarkus.runtime.annotations.Recorder; -import software.amazon.awssdk.http.SdkHttpClient; -import software.amazon.awssdk.http.TlsKeyManagersProvider; -import software.amazon.awssdk.http.apache.ApacheHttpClient; -import software.amazon.awssdk.http.apache.ApacheHttpClient.Builder; -import software.amazon.awssdk.http.apache.ProxyConfiguration; -import software.amazon.awssdk.http.async.SdkAsyncHttpClient; -import software.amazon.awssdk.http.nio.netty.Http2Configuration; -import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient; -import software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup; -import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient; -import software.amazon.awssdk.utils.StringUtils; -import 
software.amazon.awssdk.utils.ThreadFactoryBuilder; - -@Recorder -public class AmazonClientTransportRecorder { - - public RuntimeValue configureSync(String clientName, SyncHttpClientBuildTimeConfig buildConfig, - RuntimeValue syncConfigRuntime) { - - SdkHttpClient.Builder syncBuilder; - SyncHttpClientConfig syncConfig = syncConfigRuntime.getValue(); - - validateTlsManagersProvider(clientName, syncConfig.tlsManagersProvider, "sync"); - - if (buildConfig.type == SyncClientType.APACHE) { - Builder builder = ApacheHttpClient.builder(); - validateApacheClientConfig(clientName, syncConfig); - - builder.connectionTimeout(syncConfig.connectionTimeout); - builder.connectionAcquisitionTimeout(syncConfig.apache.connectionAcquisitionTimeout); - builder.connectionMaxIdleTime(syncConfig.apache.connectionMaxIdleTime); - syncConfig.apache.connectionTimeToLive.ifPresent(builder::connectionTimeToLive); - builder.expectContinueEnabled(syncConfig.apache.expectContinueEnabled); - builder.maxConnections(syncConfig.apache.maxConnections); - builder.socketTimeout(syncConfig.socketTimeout); - builder.useIdleConnectionReaper(syncConfig.apache.useIdleConnectionReaper); - - if (syncConfig.apache.proxy.enabled && syncConfig.apache.proxy.endpoint.isPresent()) { - ProxyConfiguration.Builder proxyBuilder = ProxyConfiguration.builder() - .endpoint(syncConfig.apache.proxy.endpoint.get()); - syncConfig.apache.proxy.username.ifPresent(proxyBuilder::username); - syncConfig.apache.proxy.password.ifPresent(proxyBuilder::password); - syncConfig.apache.proxy.nonProxyHosts.ifPresent(c -> c.forEach(proxyBuilder::addNonProxyHost)); - syncConfig.apache.proxy.ntlmDomain.ifPresent(proxyBuilder::ntlmDomain); - syncConfig.apache.proxy.ntlmWorkstation.ifPresent(proxyBuilder::ntlmWorkstation); - syncConfig.apache.proxy.preemptiveBasicAuthenticationEnabled - .ifPresent(proxyBuilder::preemptiveBasicAuthenticationEnabled); - - builder.proxyConfiguration(proxyBuilder.build()); - } - getTlsKeyManagersProvider(syncConfig.tlsManagersProvider).ifPresent(builder::tlsKeyManagersProvider); - - syncBuilder = builder; - } else { - UrlConnectionHttpClient.Builder builder = UrlConnectionHttpClient.builder(); - builder.connectionTimeout(syncConfig.connectionTimeout); - builder.socketTimeout(syncConfig.socketTimeout); - getTlsKeyManagersProvider(syncConfig.tlsManagersProvider).ifPresent(builder::tlsKeyManagersProvider); - - syncBuilder = builder; - } - - return new RuntimeValue<>(syncBuilder); - } - - public RuntimeValue configureAsync(String clientName, - RuntimeValue asyncConfigRuntime) { - NettyNioAsyncHttpClient.Builder builder = NettyNioAsyncHttpClient.builder(); - NettyHttpClientConfig asyncConfig = asyncConfigRuntime.getValue(); - validateNettyClientConfig(clientName, asyncConfig); - - builder.connectionAcquisitionTimeout(asyncConfig.connectionAcquisitionTimeout); - builder.connectionMaxIdleTime(asyncConfig.connectionMaxIdleTime); - builder.connectionTimeout(asyncConfig.connectionTimeout); - asyncConfig.connectionTimeToLive.ifPresent(builder::connectionTimeToLive); - builder.maxConcurrency(asyncConfig.maxConcurrency); - builder.maxPendingConnectionAcquires(asyncConfig.maxPendingConnectionAcquires); - builder.protocol(asyncConfig.protocol); - builder.readTimeout(asyncConfig.readTimeout); - builder.writeTimeout(asyncConfig.writeTimeout); - asyncConfig.sslProvider.ifPresent(builder::sslProvider); - builder.useIdleConnectionReaper(asyncConfig.useIdleConnectionReaper); - - if (asyncConfig.http2.initialWindowSize.isPresent() || 
asyncConfig.http2.maxStreams.isPresent()) { - Http2Configuration.Builder http2Builder = Http2Configuration.builder(); - asyncConfig.http2.initialWindowSize.ifPresent(http2Builder::initialWindowSize); - asyncConfig.http2.maxStreams.ifPresent(http2Builder::maxStreams); - asyncConfig.http2.healthCheckPingPeriod.ifPresent(http2Builder::healthCheckPingPeriod); - builder.http2Configuration(http2Builder.build()); - } - - if (asyncConfig.proxy.enabled && asyncConfig.proxy.endpoint.isPresent()) { - software.amazon.awssdk.http.nio.netty.ProxyConfiguration.Builder proxyBuilder = software.amazon.awssdk.http.nio.netty.ProxyConfiguration - .builder().scheme(asyncConfig.proxy.endpoint.get().getScheme()) - .host(asyncConfig.proxy.endpoint.get().getHost()) - .nonProxyHosts(new HashSet<>(asyncConfig.proxy.nonProxyHosts.orElse(Collections.emptyList()))); - - if (asyncConfig.proxy.endpoint.get().getPort() != -1) { - proxyBuilder.port(asyncConfig.proxy.endpoint.get().getPort()); - } - builder.proxyConfiguration(proxyBuilder.build()); - } - - getTlsKeyManagersProvider(asyncConfig.tlsManagersProvider).ifPresent(builder::tlsKeyManagersProvider); - - if (asyncConfig.eventLoop.override) { - SdkEventLoopGroup.Builder eventLoopBuilder = SdkEventLoopGroup.builder(); - asyncConfig.eventLoop.numberOfThreads.ifPresent(eventLoopBuilder::numberOfThreads); - if (asyncConfig.eventLoop.threadNamePrefix.isPresent()) { - eventLoopBuilder.threadFactory( - new ThreadFactoryBuilder().threadNamePrefix(asyncConfig.eventLoop.threadNamePrefix.get()).build()); - } - builder.eventLoopGroupBuilder(eventLoopBuilder); - } - - return new RuntimeValue<>(builder); - } - - private Optional getTlsKeyManagersProvider(TlsManagersProviderConfig config) { - if (config.fileStore != null && config.fileStore.path.isPresent() && config.fileStore.type.isPresent()) { - return Optional.of(config.type.create(config)); - } - return Optional.empty(); - } - - private void validateApacheClientConfig(String extension, SyncHttpClientConfig config) { - if (config.apache.maxConnections <= 0) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.sync-client.max-connections may not be negative or zero.", extension)); - } - if (config.apache.proxy.enabled) { - config.apache.proxy.endpoint.ifPresent(uri -> validateProxyEndpoint(extension, uri, "sync")); - } - } - - private void validateNettyClientConfig(String extension, NettyHttpClientConfig config) { - if (config.maxConcurrency <= 0) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.async-client.max-concurrency may not be negative or zero.", extension)); - } - - if (config.http2.maxStreams.isPresent() && config.http2.maxStreams.get() <= 0) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.async-client.http2.max-streams may not be negative.", extension)); - } - - if (config.http2.initialWindowSize.isPresent() && config.http2.initialWindowSize.getAsInt() <= 0) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.async-client.http2.initial-window-size may not be negative.", extension)); - } - - if (config.maxPendingConnectionAcquires <= 0) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.async-client.max-pending-connection-acquires may not be negative or zero.", - extension)); - } - if (config.eventLoop.override) { - if (config.eventLoop.numberOfThreads.isPresent() && config.eventLoop.numberOfThreads.getAsInt() <= 0) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.async-client.event-loop.number-of-threads may 
not be negative or zero.", - extension)); - } - } - if (config.proxy.enabled) { - config.proxy.endpoint.ifPresent(uri -> validateProxyEndpoint(extension, uri, "async")); - } - - validateTlsManagersProvider(extension, config.tlsManagersProvider, "async"); - } - - private void validateProxyEndpoint(String extension, URI endpoint, String clientType) { - if (StringUtils.isBlank(endpoint.getScheme())) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - scheme must be specified", - extension, clientType, endpoint.toString())); - } - if (StringUtils.isBlank(endpoint.getHost())) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - host must be specified", - extension, clientType, endpoint.toString())); - } - if (StringUtils.isNotBlank(endpoint.getUserInfo())) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - user info is not supported.", - extension, clientType, endpoint.toString())); - } - if (StringUtils.isNotBlank(endpoint.getPath())) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - path is not supported.", - extension, clientType, endpoint.toString())); - } - if (StringUtils.isNotBlank(endpoint.getQuery())) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - query is not supported.", - extension, clientType, endpoint.toString())); - } - if (StringUtils.isNotBlank(endpoint.getFragment())) { - throw new RuntimeConfigurationError( - String.format("quarkus.%s.%s-client.proxy.endpoint (%s) - fragment is not supported.", - extension, clientType, endpoint.toString())); - } - } - - private void validateTlsManagersProvider(String extension, TlsManagersProviderConfig config, String clientType) { - if (config != null && config.type == TlsManagersProviderType.FILE_STORE) { - if (config.fileStore == null) { - throw new RuntimeConfigurationError( - String.format( - "quarkus.%s.%s-client.tls-managers-provider.file-store must be specified if 'FILE_STORE' provider type is used", - extension, clientType)); - } else { - if (!config.fileStore.password.isPresent()) { - throw new RuntimeConfigurationError( - String.format( - "quarkus.%s.%s-client.tls-managers-provider.file-store.path should not be empty if 'FILE_STORE' provider is used.", - extension, clientType)); - } - if (!config.fileStore.type.isPresent()) { - throw new RuntimeConfigurationError( - String.format( - "quarkus.%s.%s-client.tls-managers-provider.file-store.type should not be empty if 'FILE_STORE' provider is used.", - extension, clientType)); - } - if (!config.fileStore.password.isPresent()) { - throw new RuntimeConfigurationError( - String.format( - "quarkus.%s.%s-client.tls-managers-provider.file-store.password should not be empty if 'FILE_STORE' provider is used.", - extension, clientType)); - } - } - } - } -} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientUrlConnectionTransportRecorder.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientUrlConnectionTransportRecorder.java new file mode 100644 index 0000000000000..591eaa08ceb96 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AmazonClientUrlConnectionTransportRecorder.java @@ -0,0 +1,25 @@ +package io.quarkus.amazon.common.runtime; + +import 
io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.annotations.Recorder; +import software.amazon.awssdk.http.SdkHttpClient; +import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient; + +@Recorder +public class AmazonClientUrlConnectionTransportRecorder extends AbstractAmazonClientTransportRecorder { + + @SuppressWarnings("rawtypes") + @Override + public RuntimeValue configureSync(String clientName, + RuntimeValue syncConfigRuntime) { + SyncHttpClientConfig syncConfig = syncConfigRuntime.getValue(); + validateTlsKeyManagersProvider(clientName, syncConfig.tlsKeyManagersProvider, "sync"); + validateTlsTrustManagersProvider(clientName, syncConfig.tlsTrustManagersProvider, "sync"); + UrlConnectionHttpClient.Builder builder = UrlConnectionHttpClient.builder(); + builder.connectionTimeout(syncConfig.connectionTimeout); + builder.socketTimeout(syncConfig.socketTimeout); + getTlsKeyManagersProvider(syncConfig.tlsKeyManagersProvider).ifPresent(builder::tlsKeyManagersProvider); + getTlsTrustManagersProvider(syncConfig.tlsTrustManagersProvider).ifPresent(builder::tlsTrustManagersProvider); + return new RuntimeValue<>(builder); + } +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AwsCredentialsProviderConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AwsCredentialsProviderConfig.java index 70dacdf9e9e2f..cb0c0313e6b7f 100644 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AwsCredentialsProviderConfig.java +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/AwsCredentialsProviderConfig.java @@ -18,7 +18,7 @@ public class AwsCredentialsProviderConfig { * Available values: * * * `default` - the provider will attempt to identify the credentials automatically using the following checks: - * ** Java System Properties - `aws.accessKeyId` and `aws.secretKey` + * ** Java System Properties - `aws.accessKeyId` and `aws.secretAccessKey` * ** Environment Variables - `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` * ** Credential profiles file at the default location (`~/.aws/credentials`) shared by all AWS SDKs and the AWS CLI * ** Credentials delivered through the Amazon EC2 container service if `AWS_CONTAINER_CREDENTIALS_RELATIVE_URI` environment variable is set and security manager has permission to access the variable. diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/FileStoreTlsManagersProviderConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/FileStoreTlsManagersProviderConfig.java new file mode 100644 index 0000000000000..be4e086162189 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/FileStoreTlsManagersProviderConfig.java @@ -0,0 +1,34 @@ +package io.quarkus.amazon.common.runtime; + +import java.nio.file.Path; +import java.util.Optional; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class FileStoreTlsManagersProviderConfig { + + /** + * Path to the key store. + */ + @ConfigItem + public Optional path; + + /** + * Key store type. + *
<p>
    + * See the KeyStore section in + * the https://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html#KeyStore[Java Cryptography + * Architecture Standard Algorithm Name Documentation] + * for information about standard keystore types. + */ + @ConfigItem + public Optional type; + + /** + * Key store password + */ + @ConfigItem + public Optional password; +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/FileStoreTlsTrustManagersProvider.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/FileStoreTlsTrustManagersProvider.java new file mode 100644 index 0000000000000..bb059c2b4fe0d --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/FileStoreTlsTrustManagersProvider.java @@ -0,0 +1,50 @@ +package io.quarkus.amazon.common.runtime; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; + +import javax.net.ssl.TrustManager; +import javax.net.ssl.TrustManagerFactory; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import software.amazon.awssdk.http.TlsTrustManagersProvider; + +/** + * A TlsTrustManagersProvider for creating trustmanagers from a file-based truststore + */ +public class FileStoreTlsTrustManagersProvider implements TlsTrustManagersProvider { + + private final Path path; + private final String type; + private final char[] password; + + public FileStoreTlsTrustManagersProvider(FileStoreTlsManagersProviderConfig fileStore) { + path = fileStore.path.get(); + type = fileStore.type.get(); + password = fileStore.password.map(String::toCharArray).orElse(null); + } + + @Override + public TrustManager[] trustManagers() { + try (InputStream storeInputStream = Files.newInputStream(path)) { + KeyStore keyStore = KeyStore.getInstance(type); + keyStore.load(storeInputStream, password); + TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(keyStore); + return tmf.getTrustManagers(); + } catch (KeyStoreException | CertificateException | NoSuchAlgorithmException | IOException e) { + LOGGER.error("Failed to load truststore", e); + return null; + } + } + + private static final Logger LOGGER = LoggerFactory.getLogger(FileStoreTlsTrustManagersProvider.class); +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NettyHttpClientConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NettyHttpClientConfig.java index bf3217ab94d01..019eb58608cdb 100644 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NettyHttpClientConfig.java +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NettyHttpClientConfig.java @@ -70,7 +70,7 @@ public class NettyHttpClientConfig { *
<p>
    * Currently has no effect if `quarkus..async-client.use-idle-connection-reaper` is false. */ - @ConfigItem(defaultValue = "60S") + @ConfigItem(defaultValue = "5S") public Duration connectionMaxIdleTime; /** @@ -109,10 +109,16 @@ public class NettyHttpClientConfig { public NettyProxyConfiguration proxy; /** - * TLS Managers provider configuration + * TLS Key Managers provider configuration */ @ConfigItem - public TlsManagersProviderConfig tlsManagersProvider; + public TlsKeyManagersProviderConfig tlsKeyManagersProvider; + + /** + * TLS Trust Managers provider configuration + */ + @ConfigItem + public TlsTrustManagersProviderConfig tlsTrustManagersProvider; /** * Netty event loop configuration override diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NoneTlsTrustManagersProvider.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NoneTlsTrustManagersProvider.java new file mode 100644 index 0000000000000..e4c0cf010c9c1 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/NoneTlsTrustManagersProvider.java @@ -0,0 +1,39 @@ +package io.quarkus.amazon.common.runtime; + +import java.security.cert.X509Certificate; + +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +import software.amazon.awssdk.http.TlsTrustManagersProvider; + +/** + * A TlsTrustManagersProvider that creates a trustmanager trusting all certificates + */ +public class NoneTlsTrustManagersProvider implements TlsTrustManagersProvider { + private static final NoneTlsTrustManagersProvider INSTANCE = new NoneTlsTrustManagersProvider(); + + @Override + public TrustManager[] trustManagers() { + return new TrustManager[] { new InsecureTrustManager() }; + } + + private static class InsecureTrustManager implements X509TrustManager { + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) { + } + + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType) { + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + return new X509Certificate[] {}; + } + } + + public static NoneTlsTrustManagersProvider getInstance() { + return INSTANCE; + } +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SdkBuildTimeConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SdkBuildTimeConfig.java index 364aab283b9bc..4f366aecff950 100644 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SdkBuildTimeConfig.java +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SdkBuildTimeConfig.java @@ -22,5 +22,5 @@ public class SdkBuildTimeConfig { * @see software.amazon.awssdk.core.interceptor.ExecutionInterceptor */ @ConfigItem - public Optional>> interceptors; + public Optional> interceptors; // cannot be classes as can be runtime initialized (e.g. 
XRay interceptor) } diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SyncHttpClientConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SyncHttpClientConfig.java index 2e34970217785..67574a0a4e021 100644 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SyncHttpClientConfig.java +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SyncHttpClientConfig.java @@ -25,10 +25,16 @@ public class SyncHttpClientConfig { public Duration socketTimeout; /** - * TLS Managers provider configuration + * TLS Key Managers provider configuration */ @ConfigItem - public TlsManagersProviderConfig tlsManagersProvider; + public TlsKeyManagersProviderConfig tlsKeyManagersProvider; + + /** + * TLS Trust Managers provider configuration + */ + @ConfigItem + public TlsTrustManagersProviderConfig tlsTrustManagersProvider; /** * Apache HTTP client specific configurations diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SystemPropertyTlsTrustManagersProvider.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SystemPropertyTlsTrustManagersProvider.java new file mode 100644 index 0000000000000..7292f5ce14874 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/SystemPropertyTlsTrustManagersProvider.java @@ -0,0 +1,21 @@ +package io.quarkus.amazon.common.runtime; + +import javax.net.ssl.TrustManager; + +import software.amazon.awssdk.http.TlsTrustManagersProvider; + +/** + * A TlsTrustManagersProvider for loading the system default truststore configuration + */ +public class SystemPropertyTlsTrustManagersProvider implements TlsTrustManagersProvider { + private static final SystemPropertyTlsTrustManagersProvider INSTANCE = new SystemPropertyTlsTrustManagersProvider(); + + @Override + public TrustManager[] trustManagers() { + return null; + } + + public static SystemPropertyTlsTrustManagersProvider getInstance() { + return INSTANCE; + } +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsKeyManagersProviderConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsKeyManagersProviderConfig.java new file mode 100644 index 0000000000000..22101dd1920d4 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsKeyManagersProviderConfig.java @@ -0,0 +1,35 @@ +package io.quarkus.amazon.common.runtime; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class TlsKeyManagersProviderConfig { + + // @formatter:off + /** + * TLS key managers provider type. + * + * Available providers: + * + * * `none` - Use this provider if you don't want the client to present any certificates to the remote TLS host. + * * `system-property` - Provider checks the standard `javax.net.ssl.keyStore`, `javax.net.ssl.keyStorePassword`, and + * `javax.net.ssl.keyStoreType` properties defined by the + * https://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html[JSSE]. + * * `file-store` - Provider that loads the key store from a file. 
+ * + * @asciidoclet + */ + // @formatter:on + @ConfigItem(defaultValue = "system-property") + public TlsKeyManagersProviderType type; + + /** + * Configuration of the file store provider. + *
<p>
    + * Used only if {@code FILE_STORE} type is chosen. + */ + @ConfigItem + public FileStoreTlsManagersProviderConfig fileStore; + +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsKeyManagersProviderType.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsKeyManagersProviderType.java new file mode 100644 index 0000000000000..1b383510fa4a9 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsKeyManagersProviderType.java @@ -0,0 +1,30 @@ +package io.quarkus.amazon.common.runtime; + +import software.amazon.awssdk.http.FileStoreTlsKeyManagersProvider; +import software.amazon.awssdk.http.SystemPropertyTlsKeyManagersProvider; +import software.amazon.awssdk.http.TlsKeyManagersProvider; + +public enum TlsKeyManagersProviderType { + NONE { + @Override + public TlsKeyManagersProvider create(TlsKeyManagersProviderConfig config) { + return TlsKeyManagersProvider.noneProvider(); + } + }, + SYSTEM_PROPERTY { + @Override + public TlsKeyManagersProvider create(TlsKeyManagersProviderConfig config) { + return SystemPropertyTlsKeyManagersProvider.create(); + } + }, + FILE_STORE { + @Override + public TlsKeyManagersProvider create(TlsKeyManagersProviderConfig config) { + final FileStoreTlsManagersProviderConfig fileStore = config.fileStore; + return FileStoreTlsKeyManagersProvider.create(fileStore.path.get(), fileStore.type.get(), + fileStore.password.orElse(null)); + } + }; + + public abstract TlsKeyManagersProvider create(TlsKeyManagersProviderConfig config); +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsManagersProviderConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsManagersProviderConfig.java deleted file mode 100644 index 543dced36c0fe..0000000000000 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsManagersProviderConfig.java +++ /dev/null @@ -1,65 +0,0 @@ -package io.quarkus.amazon.common.runtime; - -import java.nio.file.Path; -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -@ConfigGroup -public class TlsManagersProviderConfig { - - // @formatter:off - /** - * TLS managers provider type. - * - * Available providers: - * - * * `none` - Use this provider if you don't want the client to present any certificates to the remote TLS host. - * * `system-property` - Provider checks the standard `javax.net.ssl.keyStore`, `javax.net.ssl.keyStorePassword`, and - * `javax.net.ssl.keyStoreType` properties defined by the - * https://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html[JSSE]. - * * `file-store` - Provider that loads a the key store from a file. - * - * @asciidoclet - */ - // @formatter:on - @ConfigItem(defaultValue = "system-property") - public TlsManagersProviderType type; - - /** - * Configuration of the file store provider. - *
<p>
    - * Used only if {@code FILE_STORE} type is chosen. - */ - @ConfigItem - public FileStoreTlsManagersProviderConfig fileStore; - - @ConfigGroup - public static class FileStoreTlsManagersProviderConfig { - - /** - * Path to the key store. - */ - @ConfigItem - public Optional path; - - /** - * Key store type. - *
<p>
    - * See the KeyStore section in - * the https://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html#KeyStore[Java Cryptography - * Architecture Standard Algorithm Name Documentation] - * for information about standard keystore types. - */ - @ConfigItem - public Optional type; - - /** - * Key store password - */ - @ConfigItem - public Optional password; - } - -} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsManagersProviderType.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsManagersProviderType.java deleted file mode 100644 index bd5ebce5ea5f6..0000000000000 --- a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsManagersProviderType.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.amazon.common.runtime; - -import software.amazon.awssdk.http.FileStoreTlsKeyManagersProvider; -import software.amazon.awssdk.http.SystemPropertyTlsKeyManagersProvider; -import software.amazon.awssdk.http.TlsKeyManagersProvider; - -public enum TlsManagersProviderType { - NONE { - @Override - public TlsKeyManagersProvider create(TlsManagersProviderConfig config) { - return TlsKeyManagersProvider.noneProvider(); - } - }, - SYSTEM_PROPERTY { - @Override - public TlsKeyManagersProvider create(TlsManagersProviderConfig config) { - return SystemPropertyTlsKeyManagersProvider.create(); - } - }, - FILE_STORE { - @Override - public TlsKeyManagersProvider create(TlsManagersProviderConfig config) { - final TlsManagersProviderConfig.FileStoreTlsManagersProviderConfig fileStore = config.fileStore; - return FileStoreTlsKeyManagersProvider.create(fileStore.path.get(), fileStore.type.get(), - fileStore.password.orElse(null)); - } - }; - - public abstract TlsKeyManagersProvider create(TlsManagersProviderConfig config); -} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsTrustManagersProviderConfig.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsTrustManagersProviderConfig.java new file mode 100644 index 0000000000000..97f0f10150506 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsTrustManagersProviderConfig.java @@ -0,0 +1,35 @@ +package io.quarkus.amazon.common.runtime; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class TlsTrustManagersProviderConfig { + + // @formatter:off + /** + * TLS trust managers provider type. + * + * Available providers: + * + * * `trust-all` - Use this provider to disable the validation of server certificates and therefore trust all server certificates. + * * `system-property` - Provider checks the standard `javax.net.ssl.trustStore`, `javax.net.ssl.trustStorePassword`, and + * `javax.net.ssl.trustStoreType` properties defined by the + * https://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html[JSSE]. + * * `file-store` - Provider that loads the trust store from a file. + * + * @asciidoclet + */ + // @formatter:on + @ConfigItem(defaultValue = "system-property") + public TlsTrustManagersProviderType type; + + /** + * Configuration of the file store provider. + *
<p>
    + * Used only if {@code FILE_STORE} type is chosen. + */ + @ConfigItem + public FileStoreTlsManagersProviderConfig fileStore; + +} diff --git a/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsTrustManagersProviderType.java b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsTrustManagersProviderType.java new file mode 100644 index 0000000000000..05fd05083be84 --- /dev/null +++ b/extensions/amazon-services/common/runtime/src/main/java/io/quarkus/amazon/common/runtime/TlsTrustManagersProviderType.java @@ -0,0 +1,26 @@ +package io.quarkus.amazon.common.runtime; + +import software.amazon.awssdk.http.TlsTrustManagersProvider; + +public enum TlsTrustManagersProviderType { + TRUST_ALL { + @Override + public TlsTrustManagersProvider create(TlsTrustManagersProviderConfig config) { + return new NoneTlsTrustManagersProvider(); + } + }, + SYSTEM_PROPERTY { + @Override + public TlsTrustManagersProvider create(TlsTrustManagersProviderConfig config) { + return new SystemPropertyTlsTrustManagersProvider(); + } + }, + FILE_STORE { + @Override + public TlsTrustManagersProvider create(TlsTrustManagersProviderConfig config) { + return new FileStoreTlsTrustManagersProvider(config.fileStore); + } + }; + + public abstract TlsTrustManagersProvider create(TlsTrustManagersProviderConfig config); +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/main/java/io/quarkus/amazon/dynamodb/deployment/DynamodbProcessor.java b/extensions/amazon-services/dynamodb/deployment/src/main/java/io/quarkus/amazon/dynamodb/deployment/DynamodbProcessor.java index ee98f3ee319e5..6a2e0d57d33c0 100644 --- a/extensions/amazon-services/dynamodb/deployment/src/main/java/io/quarkus/amazon/dynamodb/deployment/DynamodbProcessor.java +++ b/extensions/amazon-services/dynamodb/deployment/src/main/java/io/quarkus/amazon/dynamodb/deployment/DynamodbProcessor.java @@ -5,13 +5,17 @@ import org.jboss.jandex.DotName; import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; -import io.quarkus.amazon.common.deployment.AmazonClientTransportsBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.dynamodb.runtime.DynamodbBuildTimeConfig; import io.quarkus.amazon.dynamodb.runtime.DynamodbClientProducer; import io.quarkus.amazon.dynamodb.runtime.DynamodbConfig; @@ -75,26 +79,51 @@ void setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, buildTimeConfig.sdk, buildTimeConfig.syncClient); } - @BuildStep + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) @Record(ExecutionTime.RUNTIME_INIT) - void setupTransport(List amazonClients, 
DynamodbRecorder recorder, - AmazonClientTransportRecorder transportRecorder, - DynamodbConfig runtimeConfig, BuildProducer clientTransportBuildProducer) { + void setupApacheSyncTransport(List amazonClients, DynamodbRecorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + DynamodbConfig runtimeConfig, BuildProducer syncTransports) { - createTransportBuilders(amazonClients, + createApacheSyncTransportBuilder(amazonClients, transportRecorder, buildTimeConfig.syncClient, recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, DynamodbRecorder recorder, + AmazonClientUrlConnectionTransportRecorder transportRecorder, + DynamodbConfig runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, DynamodbRecorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + DynamodbConfig runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, recorder.getAsyncConfig(runtimeConfig), - clientTransportBuildProducer); + asyncTransports); } @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void createClientBuilders(List transportBuildItems, DynamodbRecorder recorder, + void createClientBuilders(List syncTransports, + List asyncTransports, DynamodbRecorder recorder, DynamodbConfig runtimeConfig, BuildProducer builderProducer) { - createClientBuilders(transportBuildItems, builderProducer, + createClientBuilders(syncTransports, asyncTransports, builderProducer, (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); } diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsFileStoreConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsFileStoreConfigTest.java deleted file mode 100644 index 047da6262e9e7..0000000000000 --- a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsFileStoreConfigTest.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.quarkus.amazon.dynamodb.deployment; - -import javax.inject.Inject; - -import org.jboss.shrinkwrap.api.ShrinkWrap; -import org.jboss.shrinkwrap.api.spec.JavaArchive; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.test.QuarkusUnitTest; -import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient; - -public class DynamodbAsyncClientTlsFileStoreConfigTest { - - @Inject - DynamoDbAsyncClient client; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) - .addAsResource("async-tls-filestore-config.properties", "application.properties")); - - @Test - public void test() { - // Application should start with full config. 
- } -} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsKeyFileStoreConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsKeyFileStoreConfigTest.java new file mode 100644 index 0000000000000..1b0a647150c13 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsKeyFileStoreConfigTest.java @@ -0,0 +1,27 @@ +package io.quarkus.amazon.dynamodb.deployment; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient; + +public class DynamodbAsyncClientTlsKeyFileStoreConfigTest { + + @Inject + DynamoDbAsyncClient client; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("async-tls-key-filestore-config.properties", "application.properties")); + + @Test + public void test() { + // Application should start with full config. + } +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsTrustFileStoreConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsTrustFileStoreConfigTest.java new file mode 100644 index 0000000000000..c48476f6e5456 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsTrustFileStoreConfigTest.java @@ -0,0 +1,27 @@ +package io.quarkus.amazon.dynamodb.deployment; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient; + +public class DynamodbAsyncClientTlsTrustFileStoreConfigTest { + + @Inject + DynamoDbAsyncClient client; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("async-tls-trust-filestore-config.properties", "application.properties")); + + @Test + public void test() { + // Application should start with full config. 
+ } +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsTrustTrustAllConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsTrustTrustAllConfigTest.java new file mode 100644 index 0000000000000..c87fe02d3f9ed --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbAsyncClientTlsTrustTrustAllConfigTest.java @@ -0,0 +1,27 @@ +package io.quarkus.amazon.dynamodb.deployment; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient; + +public class DynamodbAsyncClientTlsTrustTrustAllConfigTest { + + @Inject + DynamoDbAsyncClient client; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("async-tls-trust-trustall-config.properties", "application.properties")); + + @Test + public void test() { + // Application should start with full config. + } +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsFileStoreConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsFileStoreConfigTest.java deleted file mode 100644 index 84cb010bb95a0..0000000000000 --- a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsFileStoreConfigTest.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.quarkus.amazon.dynamodb.deployment; - -import javax.inject.Inject; - -import org.jboss.shrinkwrap.api.ShrinkWrap; -import org.jboss.shrinkwrap.api.spec.JavaArchive; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.test.QuarkusUnitTest; -import software.amazon.awssdk.services.dynamodb.DynamoDbClient; - -public class DynamodbSyncClientTlsFileStoreConfigTest { - - @Inject - DynamoDbClient client; - - @RegisterExtension - static final QuarkusUnitTest config = new QuarkusUnitTest() - .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) - .addAsResource("sync-tls-filestore-config.properties", "application.properties")); - - @Test - public void test() { - // Application should start with full config. 
- } -} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsKeyFileStoreConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsKeyFileStoreConfigTest.java new file mode 100644 index 0000000000000..06fa6ddb85dcb --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsKeyFileStoreConfigTest.java @@ -0,0 +1,27 @@ +package io.quarkus.amazon.dynamodb.deployment; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; + +public class DynamodbSyncClientTlsKeyFileStoreConfigTest { + + @Inject + DynamoDbClient client; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("sync-tls-key-filestore-config.properties", "application.properties")); + + @Test + public void test() { + // Application should start with full config. + } +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsTrustFileStoreConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsTrustFileStoreConfigTest.java new file mode 100644 index 0000000000000..dd9a300518e53 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsTrustFileStoreConfigTest.java @@ -0,0 +1,27 @@ +package io.quarkus.amazon.dynamodb.deployment; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; + +public class DynamodbSyncClientTlsTrustFileStoreConfigTest { + + @Inject + DynamoDbClient client; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("sync-tls-trust-filestore-config.properties", "application.properties")); + + @Test + public void test() { + // Application should start with full config. 
+ } +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsTrustTrustAllConfigTest.java b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsTrustTrustAllConfigTest.java new file mode 100644 index 0000000000000..12b43826df709 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/java/io/quarkus/amazon/dynamodb/deployment/DynamodbSyncClientTlsTrustTrustAllConfigTest.java @@ -0,0 +1,27 @@ +package io.quarkus.amazon.dynamodb.deployment; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; + +public class DynamodbSyncClientTlsTrustTrustAllConfigTest { + + @Inject + DynamoDbClient client; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("sync-tls-trust-trustall-config.properties", "application.properties")); + + @Test + public void test() { + // Application should start with full config. + } +} diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-full-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-full-config.properties index d7eb7c06ba241..809e195becff9 100644 --- a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-full-config.properties +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-full-config.properties @@ -28,4 +28,5 @@ quarkus.dynamodb.async-client.event-loop.thread-name-prefix = Quarkus-Netty-Even quarkus.dynamodb.async-client.proxy.enabled = true quarkus.dynamodb.async-client.proxy.endpoint = http://127.1.1.1 quarkus.dynamodb.async-client.proxy.non-proxy-hosts = localhost, hostlocal -quarkus.dynamodb.async-client.tls-managers-provider.type=system-property \ No newline at end of file +quarkus.dynamodb.async-client.tls-key-managers-provider.type=system-property +quarkus.dynamodb.async-client.tls-trust-managers-provider.type=system-property diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-filestore-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-filestore-config.properties deleted file mode 100644 index 65ffaa596619f..0000000000000 --- a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-filestore-config.properties +++ /dev/null @@ -1,6 +0,0 @@ -quarkus.dynamodb.aws.region=us-east-1 - -quarkus.dynamodb.async-client.tls-managers-provider.type=file-store -quarkus.dynamodb.async-client.tls-managers-provider.file-store.path=/tmp/file.key -quarkus.dynamodb.async-client.tls-managers-provider.file-store.type=pkcs11 -quarkus.dynamodb.async-client.tls-managers-provider.file-store.password=thePassword \ No newline at end of file diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-key-filestore-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-key-filestore-config.properties new file mode 100644 index 0000000000000..b52db5f3ca9a3 --- /dev/null +++ 
b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-key-filestore-config.properties @@ -0,0 +1,6 @@ +quarkus.dynamodb.aws.region=us-east-1 + +quarkus.dynamodb.async-client.tls-key-managers-provider.type=file-store +quarkus.dynamodb.async-client.tls-key-managers-provider.file-store.path=/tmp/file.key +quarkus.dynamodb.async-client.tls-key-managers-provider.file-store.type=pkcs11 +quarkus.dynamodb.async-client.tls-key-managers-provider.file-store.password=thePassword diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-trust-filestore-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-trust-filestore-config.properties new file mode 100644 index 0000000000000..e3da5c1d379c9 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-trust-filestore-config.properties @@ -0,0 +1,6 @@ +quarkus.dynamodb.aws.region=us-east-1 + +quarkus.dynamodb.async-client.tls-trust-managers-provider.type=file-store +quarkus.dynamodb.async-client.tls-trust-managers-provider.file-store.path=/tmp/file.key +quarkus.dynamodb.async-client.tls-trust-managers-provider.file-store.type=pkcs11 +quarkus.dynamodb.async-client.tls-trust-managers-provider.file-store.password=thePassword diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-trust-trustall-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-trust-trustall-config.properties new file mode 100644 index 0000000000000..8dc3d37155d51 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/async-tls-trust-trustall-config.properties @@ -0,0 +1,3 @@ +quarkus.dynamodb.aws.region=us-east-1 + +quarkus.dynamodb.async-client.tls-trust-managers-provider.type=trust-all diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-apache-full-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-apache-full-config.properties index 0f9933147e73e..009b577b7758b 100644 --- a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-apache-full-config.properties +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-apache-full-config.properties @@ -11,7 +11,8 @@ quarkus.dynamodb.aws.credentials.static-provider.secret-access-key=test-secret quarkus.dynamodb.sync-client.type = apache quarkus.dynamodb.sync-client.connection-timeout = 0.100S quarkus.dynamodb.sync-client.socket-timeout = 0.100S -quarkus.dynamodb.sync-client.tls-managers-provider.type=system-property +quarkus.dynamodb.sync-client.tls-key-managers-provider.type=system-property +quarkus.dynamodb.sync-client.tls-trust-managers-provider.type=system-property quarkus.dynamodb.sync-client.apache.connection-acquisition-timeout = 0.100S quarkus.dynamodb.sync-client.apache.connection-max-idle-time = 0.100S diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-filestore-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-filestore-config.properties deleted file mode 100644 index 1c91626607373..0000000000000 --- a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-filestore-config.properties +++ /dev/null @@ -1,6 +0,0 @@ -quarkus.dynamodb.aws.region=us-east-1 - -quarkus.dynamodb.sync-client.tls-managers-provider.type=file-store 
-quarkus.dynamodb.sync-client.tls-managers-provider.file-store.path=/tmp/file.key -quarkus.dynamodb.sync-client.tls-managers-provider.file-store.type=pkcs11 -quarkus.dynamodb.sync-client.tls-managers-provider.file-store.password=thePassword \ No newline at end of file diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-key-filestore-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-key-filestore-config.properties new file mode 100644 index 0000000000000..51c963429bd83 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-key-filestore-config.properties @@ -0,0 +1,6 @@ +quarkus.dynamodb.aws.region=us-east-1 + +quarkus.dynamodb.sync-client.tls-key-managers-provider.type=file-store +quarkus.dynamodb.sync-client.tls-key-managers-provider.file-store.path=/tmp/file.key +quarkus.dynamodb.sync-client.tls-key-managers-provider.file-store.type=pkcs11 +quarkus.dynamodb.sync-client.tls-key-managers-provider.file-store.password=thePassword diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-trust-filestore-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-trust-filestore-config.properties new file mode 100644 index 0000000000000..3e551cd316b95 --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-trust-filestore-config.properties @@ -0,0 +1,6 @@ +quarkus.dynamodb.aws.region=us-east-1 + +quarkus.dynamodb.sync-client.tls-trust-managers-provider.type=file-store +quarkus.dynamodb.sync-client.tls-trust-managers-provider.file-store.path=/tmp/file.key +quarkus.dynamodb.sync-client.tls-trust-managers-provider.file-store.type=pkcs11 +quarkus.dynamodb.sync-client.tls-trust-managers-provider.file-store.password=thePassword diff --git a/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-trust-trustall-config.properties b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-trust-trustall-config.properties new file mode 100644 index 0000000000000..b92d397b60b3f --- /dev/null +++ b/extensions/amazon-services/dynamodb/deployment/src/test/resources/sync-tls-trust-trustall-config.properties @@ -0,0 +1,3 @@ +quarkus.dynamodb.aws.region=us-east-1 + +quarkus.dynamodb.sync-client.tls-trust-managers-provider.type=trust-all diff --git a/extensions/amazon-services/dynamodb/runtime/pom.xml b/extensions/amazon-services/dynamodb/runtime/pom.xml index 9a475c0e3900e..d700cb81061b6 100644 --- a/extensions/amazon-services/dynamodb/runtime/pom.xml +++ b/extensions/amazon-services/dynamodb/runtime/pom.xml @@ -35,6 +35,21 @@ software.amazon.awssdk dynamodb + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + software.amazon.awssdk diff --git a/extensions/amazon-services/iam/deployment/pom.xml b/extensions/amazon-services/iam/deployment/pom.xml new file mode 100644 index 0000000000000..366dfcbacad36 --- /dev/null +++ b/extensions/amazon-services/iam/deployment/pom.xml @@ -0,0 +1,77 @@ + + + 4.0.0 + + + io.quarkus + quarkus-amazon-iam-parent + 999-SNAPSHOT + + + quarkus-amazon-iam-deployment + Quarkus - Amazon Services - IAM - Deployment + + + + io.quarkus + quarkus-core-deployment + + + io.quarkus + quarkus-arc-deployment + + + io.quarkus + quarkus-netty-deployment + + + io.quarkus + quarkus-amazon-common-deployment + + + io.quarkus + quarkus-amazon-iam + 
+ + + + io.quarkus + quarkus-junit5-internal + test + + + io.rest-assured + rest-assured + test + + + software.amazon.awssdk + netty-nio-client + test + + + software.amazon.awssdk + url-connection-client + test + + + + + + + maven-compiler-plugin + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + diff --git a/extensions/amazon-services/iam/deployment/src/main/java/io/quarkus/amazon/iam/deployment/IamProcessor.java b/extensions/amazon-services/iam/deployment/src/main/java/io/quarkus/amazon/iam/deployment/IamProcessor.java new file mode 100644 index 0000000000000..54016a4c4cabe --- /dev/null +++ b/extensions/amazon-services/iam/deployment/src/main/java/io/quarkus/amazon/iam/deployment/IamProcessor.java @@ -0,0 +1,152 @@ +package io.quarkus.amazon.iam.deployment; + +import java.util.List; + +import org.jboss.jandex.DotName; + +import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; +import io.quarkus.amazon.iam.runtime.IamBuildTimeConfig; +import io.quarkus.amazon.iam.runtime.IamClientProducer; +import io.quarkus.amazon.iam.runtime.IamConfig; +import io.quarkus.amazon.iam.runtime.IamRecorder; +import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.arc.deployment.BeanContainerBuildItem; +import io.quarkus.arc.deployment.BeanRegistrationPhaseBuildItem; +import io.quarkus.deployment.Feature; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.deployment.builditem.ExtensionSslNativeSupportBuildItem; +import io.quarkus.deployment.builditem.FeatureBuildItem; +import io.quarkus.deployment.builditem.ShutdownContextBuildItem; +import software.amazon.awssdk.services.iam.IamAsyncClient; +import software.amazon.awssdk.services.iam.IamClient; + +public class IamProcessor extends AbstractAmazonServiceProcessor { + + IamBuildTimeConfig buildTimeConfig; + + @Override + protected Feature amazonServiceClientName() { + return Feature.AMAZON_IAM; + } + + @Override + protected String configName() { + return "iam"; + } + + @Override + protected DotName syncClientName() { + return DotName.createSimple(IamClient.class.getName()); + } + + @Override + protected DotName asyncClientName() { + return DotName.createSimple(IamAsyncClient.class.getName()); + } + + @Override + protected String builtinInterceptorsPath() { + return "software/amazon/awssdk/services/iam/execution.interceptors"; + } + + @BuildStep + AdditionalBeanBuildItem producer() { + return AdditionalBeanBuildItem.unremovableOf(IamClientProducer.class); + } + + @BuildStep + void 
setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, + BuildProducer extensionSslNativeSupport, + BuildProducer feature, + BuildProducer interceptors, + BuildProducer clientProducer) { + + setupExtension(beanRegistrationPhase, extensionSslNativeSupport, feature, interceptors, clientProducer, + buildTimeConfig.sdk, buildTimeConfig.syncClient); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupApacheSyncTransport(List amazonClients, IamRecorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + IamConfig runtimeConfig, BuildProducer syncTransports) { + + createApacheSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, IamRecorder recorder, + AmazonClientUrlConnectionTransportRecorder transportRecorder, + IamConfig runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, IamRecorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + IamConfig runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, + recorder.getAsyncConfig(runtimeConfig), + asyncTransports); + } + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void createClientBuilders(List syncTransports, + List asyncTransports, IamRecorder recorder, + IamConfig runtimeConfig, BuildProducer builderProducer) { + + createClientBuilders(syncTransports, asyncTransports, builderProducer, + (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), + (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); + } + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void configureClient(List clients, IamRecorder recorder, + AmazonClientRecorder commonRecorder, + IamConfig runtimeConfig, + BuildProducer producer) { + + initClientBuilders(clients, commonRecorder, recorder.getAwsConfig(runtimeConfig), recorder.getSdkConfig(runtimeConfig), + buildTimeConfig.sdk, producer); + } + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void buildClients(List configuredClients, IamRecorder recorder, + BeanContainerBuildItem beanContainer, + ShutdownContextBuildItem shutdown) { + + buildClients(configuredClients, + (syncBuilder) -> recorder.buildClient(syncBuilder, beanContainer.getValue(), shutdown), + (asyncBuilder) -> recorder.buildAsyncClient(asyncBuilder, beanContainer.getValue(), shutdown)); + } +} diff --git a/extensions/amazon-services/iam/deployment/src/test/java/io/quarkus/amazon/iam/deployment/IamSyncClientFullConfigTest.java b/extensions/amazon-services/iam/deployment/src/test/java/io/quarkus/amazon/iam/deployment/IamSyncClientFullConfigTest.java new file mode 100644 index 0000000000000..312eaf1bfd9e6 --- /dev/null +++ b/extensions/amazon-services/iam/deployment/src/test/java/io/quarkus/amazon/iam/deployment/IamSyncClientFullConfigTest.java @@ -0,0 +1,33 @@ +package io.quarkus.amazon.iam.deployment; + 
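The transport build steps above are guarded by @BuildStep(onlyIf = AmazonHttpClients.IsAmazon...Present.class), so each transport is only wired up when the corresponding, now optional, AWS SDK HTTP client is on the classpath. A minimal sketch of how such an onlyIf predicate is commonly written; the real AmazonHttpClients implementation is not part of this hunk, so the body below is an assumption.

    import java.util.function.BooleanSupplier;

    public class IsAmazonApacheHttpServicePresent implements BooleanSupplier {

        @Override
        public boolean getAsBoolean() {
            try {
                // Enable the guarded build step only when the optional apache-client jar is present.
                Class.forName("software.amazon.awssdk.http.apache.ApacheHttpClient", false,
                        Thread.currentThread().getContextClassLoader());
                return true;
            } catch (ClassNotFoundException e) {
                return false;
            }
        }
    }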
+import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import software.amazon.awssdk.services.iam.IamAsyncClient; +import software.amazon.awssdk.services.iam.IamClient; + +public class IamSyncClientFullConfigTest { + + @Inject + IamClient client; + + @Inject + IamAsyncClient async; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource("sync-urlconn-full-config.properties", "application.properties")); + + @Test + public void test() { + Assertions.assertNotNull(client); + Assertions.assertNotNull(async); + } +} diff --git a/extensions/amazon-services/iam/deployment/src/test/resources/sync-urlconn-full-config.properties b/extensions/amazon-services/iam/deployment/src/test/resources/sync-urlconn-full-config.properties new file mode 100644 index 0000000000000..1023781cb67df --- /dev/null +++ b/extensions/amazon-services/iam/deployment/src/test/resources/sync-urlconn-full-config.properties @@ -0,0 +1,10 @@ +quarkus.iam.endpoint-override=http://localhost:9090 + +quarkus.iam.aws.region=us-east-1 +quarkus.iam.aws.credentials.type=static +quarkus.iam.aws.credentials.static-provider.access-key-id=test-key +quarkus.iam.aws.credentials.static-provider.secret-access-key=test-secret + +quarkus.iam.sync-client.type = url +quarkus.iam.sync-client.connection-timeout = 0.100S +quarkus.iam.sync-client.socket-timeout = 0.100S \ No newline at end of file diff --git a/extensions/amazon-services/iam/pom.xml b/extensions/amazon-services/iam/pom.xml new file mode 100644 index 0000000000000..788eb7f6f9611 --- /dev/null +++ b/extensions/amazon-services/iam/pom.xml @@ -0,0 +1,22 @@ + + + 4.0.0 + + + io.quarkus + quarkus-amazon-services-parent + 999-SNAPSHOT + + + quarkus-amazon-iam-parent + Quarkus - Amazon Services - IAM + pom + + + runtime + deployment + + + diff --git a/extensions/amazon-services/iam/runtime/pom.xml b/extensions/amazon-services/iam/runtime/pom.xml new file mode 100644 index 0000000000000..156283cfff290 --- /dev/null +++ b/extensions/amazon-services/iam/runtime/pom.xml @@ -0,0 +1,96 @@ + + + 4.0.0 + + + io.quarkus + quarkus-amazon-iam-parent + 999-SNAPSHOT + + + quarkus-amazon-iam + Quarkus - Amazon Services - IAM - Runtime + Connect to Amazon IAM + + + + io.quarkus + quarkus-core + + + io.quarkus + quarkus-arc + + + io.quarkus + quarkus-netty + + + + io.quarkus + quarkus-amazon-common + + + software.amazon.awssdk + iam + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + + + + software.amazon.awssdk + netty-nio-client + true + + + software.amazon.awssdk + url-connection-client + true + + + software.amazon.awssdk + apache-client + true + + + + org.jboss.logging + commons-logging-jboss-logging + + + + + + + io.quarkus + quarkus-bootstrap-maven-plugin + + + maven-compiler-plugin + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + diff --git a/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamBuildTimeConfig.java b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamBuildTimeConfig.java new file mode 100644 index 0000000000000..07cadf1644727 --- 
/dev/null +++ b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamBuildTimeConfig.java @@ -0,0 +1,26 @@ +package io.quarkus.amazon.iam.runtime; + +import io.quarkus.amazon.common.runtime.SdkBuildTimeConfig; +import io.quarkus.amazon.common.runtime.SyncHttpClientBuildTimeConfig; +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; + +/** + * Amazon IAM build time configuration + */ +@ConfigRoot(name = "iam", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public class IamBuildTimeConfig { + + /** + * SDK client configurations for AWS IAM client + */ + @ConfigItem(name = ConfigItem.PARENT) + public SdkBuildTimeConfig sdk; + + /** + * Sync HTTP transport configuration for Amazon IAM client + */ + @ConfigItem + public SyncHttpClientBuildTimeConfig syncClient; +} diff --git a/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamClientProducer.java b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamClientProducer.java new file mode 100644 index 0000000000000..6bc9926bdc8f0 --- /dev/null +++ b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamClientProducer.java @@ -0,0 +1,52 @@ +package io.quarkus.amazon.iam.runtime; + +import javax.annotation.PreDestroy; +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.Produces; + +import software.amazon.awssdk.services.iam.IamAsyncClient; +import software.amazon.awssdk.services.iam.IamAsyncClientBuilder; +import software.amazon.awssdk.services.iam.IamClient; +import software.amazon.awssdk.services.iam.IamClientBuilder; + +@ApplicationScoped +public class IamClientProducer { + + private volatile IamClientBuilder syncConfiguredBuilder; + private volatile IamAsyncClientBuilder asyncConfiguredBuilder; + + private IamClient client; + private IamAsyncClient asyncClient; + + @Produces + @ApplicationScoped + public IamClient client() { + client = syncConfiguredBuilder.build(); + return client; + } + + @Produces + @ApplicationScoped + public IamAsyncClient asyncClient() { + asyncClient = asyncConfiguredBuilder.build(); + return asyncClient; + } + + @PreDestroy + public void destroy() { + if (client != null) { + client.close(); + } + if (asyncClient != null) { + asyncClient.close(); + } + } + + public void setSyncConfiguredBuilder(IamClientBuilder syncConfiguredBuilder) { + this.syncConfiguredBuilder = syncConfiguredBuilder; + } + + public void setAsyncConfiguredBuilder(IamAsyncClientBuilder asyncConfiguredBuilder) { + this.asyncConfiguredBuilder = asyncConfiguredBuilder; + } +} diff --git a/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamConfig.java b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamConfig.java new file mode 100644 index 0000000000000..4d6ecad8d02b5 --- /dev/null +++ b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamConfig.java @@ -0,0 +1,42 @@ +package io.quarkus.amazon.iam.runtime; + +import io.quarkus.amazon.common.runtime.AwsConfig; +import io.quarkus.amazon.common.runtime.NettyHttpClientConfig; +import io.quarkus.amazon.common.runtime.SdkConfig; +import io.quarkus.amazon.common.runtime.SyncHttpClientConfig; +import io.quarkus.runtime.annotations.ConfigDocSection; +import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigPhase; 
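IamClientProducer above exposes the sync and async clients as @ApplicationScoped beans, so application code consumes them through plain CDI injection, exactly as the deployment test in this hunk does. A small usage sketch; the listUsers() call is just an illustrative IAM operation from the AWS SDK.

    import javax.enterprise.context.ApplicationScoped;
    import javax.inject.Inject;

    import software.amazon.awssdk.services.iam.IamClient;
    import software.amazon.awssdk.services.iam.model.ListUsersResponse;

    @ApplicationScoped
    public class IamUserLister {

        @Inject
        IamClient iam; // built by IamClientProducer from the quarkus.iam.* configuration

        public ListUsersResponse listUsers() {
            // Synchronous call through whichever HTTP transport was selected at build time.
            return iam.listUsers();
        }
    }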
+import io.quarkus.runtime.annotations.ConfigRoot; + +@ConfigRoot(name = "iam", phase = ConfigPhase.RUN_TIME) +public class IamConfig { + + /** + * AWS SDK client configurations + */ + @ConfigItem(name = ConfigItem.PARENT) + @ConfigDocSection + public SdkConfig sdk; + + /** + * AWS services configurations + */ + @ConfigItem + @ConfigDocSection + public AwsConfig aws; + + /** + * Sync HTTP transport configurations + */ + @ConfigItem + @ConfigDocSection + public SyncHttpClientConfig syncClient; + + /** + * Netty HTTP transport configurations + */ + @ConfigItem + @ConfigDocSection + public NettyHttpClientConfig asyncClient; +} diff --git a/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamRecorder.java b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamRecorder.java new file mode 100644 index 0000000000000..eaabce592f437 --- /dev/null +++ b/extensions/amazon-services/iam/runtime/src/main/java/io/quarkus/amazon/iam/runtime/IamRecorder.java @@ -0,0 +1,73 @@ +package io.quarkus.amazon.iam.runtime; + +import io.quarkus.amazon.common.runtime.AwsConfig; +import io.quarkus.amazon.common.runtime.NettyHttpClientConfig; +import io.quarkus.amazon.common.runtime.SdkConfig; +import io.quarkus.amazon.common.runtime.SyncHttpClientConfig; +import io.quarkus.arc.runtime.BeanContainer; +import io.quarkus.runtime.RuntimeValue; +import io.quarkus.runtime.ShutdownContext; +import io.quarkus.runtime.annotations.Recorder; +import software.amazon.awssdk.awscore.client.builder.AwsClientBuilder; +import software.amazon.awssdk.http.SdkHttpClient; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; +import software.amazon.awssdk.services.iam.IamAsyncClient; +import software.amazon.awssdk.services.iam.IamAsyncClientBuilder; +import software.amazon.awssdk.services.iam.IamClient; +import software.amazon.awssdk.services.iam.IamClientBuilder; + +@Recorder +public class IamRecorder { + + public RuntimeValue getSyncConfig(IamConfig config) { + return new RuntimeValue<>(config.syncClient); + } + + public RuntimeValue getAsyncConfig(IamConfig config) { + return new RuntimeValue<>(config.asyncClient); + } + + public RuntimeValue getAwsConfig(IamConfig config) { + return new RuntimeValue<>(config.aws); + } + + public RuntimeValue getSdkConfig(IamConfig config) { + return new RuntimeValue<>(config.sdk); + } + + public RuntimeValue createSyncBuilder(IamConfig config, RuntimeValue transport) { + IamClientBuilder builder = IamClient.builder(); + if (transport != null) { + builder.httpClientBuilder(transport.getValue()); + } + return new RuntimeValue<>(builder); + } + + public RuntimeValue createAsyncBuilder(IamConfig config, + RuntimeValue transport) { + + IamAsyncClientBuilder builder = IamAsyncClient.builder(); + if (transport != null) { + builder.httpClientBuilder(transport.getValue()); + } + return new RuntimeValue<>(builder); + } + + public RuntimeValue buildClient(RuntimeValue builder, + BeanContainer beanContainer, + ShutdownContext shutdown) { + IamClientProducer producer = beanContainer.instance(IamClientProducer.class); + producer.setSyncConfiguredBuilder((IamClientBuilder) builder.getValue()); + shutdown.addShutdownTask(producer::destroy); + return new RuntimeValue<>(producer.client()); + } + + public RuntimeValue buildAsyncClient(RuntimeValue builder, + BeanContainer beanContainer, + ShutdownContext shutdown) { + IamClientProducer producer = beanContainer.instance(IamClientProducer.class); + 
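+ // Hand the fully configured builder to the CDI producer and register destroy()
+ // as a shutdown task so the client and its HTTP transport are closed on shutdown.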
producer.setAsyncConfiguredBuilder((IamAsyncClientBuilder) builder.getValue()); + shutdown.addShutdownTask(producer::destroy); + return new RuntimeValue<>(producer.asyncClient()); + } +} diff --git a/extensions/amazon-services/iam/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/amazon-services/iam/runtime/src/main/resources/META-INF/quarkus-extension.yaml new file mode 100644 index 0000000000000..b7c66cf0be5aa --- /dev/null +++ b/extensions/amazon-services/iam/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -0,0 +1,11 @@ +--- +name: "Amazon IAM" +metadata: + keywords: + - "iam" + - "aws" + - "amazon" + guide: "https://quarkus.io/guides/amazon-iam" + categories: + - "data" + status: "preview" diff --git a/extensions/amazon-services/kms/deployment/src/main/java/io/quarkus/amazon/kms/deployment/KmsProcessor.java b/extensions/amazon-services/kms/deployment/src/main/java/io/quarkus/amazon/kms/deployment/KmsProcessor.java index 4c97d707ae865..64d256bd147be 100644 --- a/extensions/amazon-services/kms/deployment/src/main/java/io/quarkus/amazon/kms/deployment/KmsProcessor.java +++ b/extensions/amazon-services/kms/deployment/src/main/java/io/quarkus/amazon/kms/deployment/KmsProcessor.java @@ -5,13 +5,17 @@ import org.jboss.jandex.DotName; import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; -import io.quarkus.amazon.common.deployment.AmazonClientTransportsBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.kms.runtime.KmsBuildTimeConfig; import io.quarkus.amazon.kms.runtime.KmsClientProducer; import io.quarkus.amazon.kms.runtime.KmsConfig; @@ -75,26 +79,51 @@ void setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, buildTimeConfig.sdk, buildTimeConfig.syncClient); } - @BuildStep + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) @Record(ExecutionTime.RUNTIME_INIT) - void setupTransport(List amazonClients, KmsRecorder recorder, - AmazonClientTransportRecorder transportRecorder, - KmsConfig runtimeConfig, BuildProducer clientTransportBuildProducer) { + void setupApacheSyncTransport(List amazonClients, KmsRecorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + KmsConfig runtimeConfig, BuildProducer syncTransports) { - createTransportBuilders(amazonClients, + createApacheSyncTransportBuilder(amazonClients, transportRecorder, buildTimeConfig.syncClient, recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, KmsRecorder recorder, 
+ AmazonClientUrlConnectionTransportRecorder transportRecorder, + KmsConfig runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, KmsRecorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + KmsConfig runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, recorder.getAsyncConfig(runtimeConfig), - clientTransportBuildProducer); + asyncTransports); } @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void createClientBuilders(List transportBuildItems, KmsRecorder recorder, + void createClientBuilders(List syncTransports, + List asyncTransports, KmsRecorder recorder, KmsConfig runtimeConfig, BuildProducer builderProducer) { - createClientBuilders(transportBuildItems, builderProducer, + createClientBuilders(syncTransports, asyncTransports, builderProducer, (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); } diff --git a/extensions/amazon-services/kms/runtime/pom.xml b/extensions/amazon-services/kms/runtime/pom.xml index 3b22a4dc99619..0eba7e257d8be 100644 --- a/extensions/amazon-services/kms/runtime/pom.xml +++ b/extensions/amazon-services/kms/runtime/pom.xml @@ -35,6 +35,21 @@ software.amazon.awssdk kms + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + software.amazon.awssdk diff --git a/extensions/amazon-services/pom.xml b/extensions/amazon-services/pom.xml index e973bc3730751..128b00c01338a 100644 --- a/extensions/amazon-services/pom.xml +++ b/extensions/amazon-services/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 @@ -17,6 +17,7 @@ common dynamodb + iam s3 sns sqs diff --git a/extensions/amazon-services/s3/deployment/src/main/java/io/quarkus/amazon/s3/deployment/S3Processor.java b/extensions/amazon-services/s3/deployment/src/main/java/io/quarkus/amazon/s3/deployment/S3Processor.java index 57c0b4aaabfa9..b8b128ef98555 100644 --- a/extensions/amazon-services/s3/deployment/src/main/java/io/quarkus/amazon/s3/deployment/S3Processor.java +++ b/extensions/amazon-services/s3/deployment/src/main/java/io/quarkus/amazon/s3/deployment/S3Processor.java @@ -5,13 +5,17 @@ import org.jboss.jandex.DotName; import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; -import io.quarkus.amazon.common.deployment.AmazonClientTransportsBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import 
io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.s3.runtime.S3BuildTimeConfig; import io.quarkus.amazon.s3.runtime.S3ClientProducer; import io.quarkus.amazon.s3.runtime.S3Config; @@ -76,26 +80,52 @@ void setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, buildTimeConfig.syncClient); } - @BuildStep + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) @Record(ExecutionTime.RUNTIME_INIT) - void setupTransport(List amazonClients, S3Recorder recorder, - AmazonClientTransportRecorder transportRecorder, - S3Config runtimeConfig, BuildProducer clientTransportBuildProducer) { + void setupApacheSyncTransport(List amazonClients, S3Recorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + S3Config runtimeConfig, BuildProducer syncTransports) { - createTransportBuilders(amazonClients, + createApacheSyncTransportBuilder(amazonClients, transportRecorder, buildTimeConfig.syncClient, recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, S3Recorder recorder, + AmazonClientUrlConnectionTransportRecorder transportRecorder, + S3Config runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, S3Recorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + S3Config runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, recorder.getAsyncConfig(runtimeConfig), - clientTransportBuildProducer); + asyncTransports); } @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void createClientBuilders(List transportBuildItems, S3Recorder recorder, + void createClientBuilders(List syncTransports, + List asyncTransports, + S3Recorder recorder, S3Config runtimeConfig, BuildProducer builderProducer) { - createClientBuilders(transportBuildItems, builderProducer, + createClientBuilders(syncTransports, asyncTransports, builderProducer, (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); } diff --git a/extensions/amazon-services/s3/runtime/pom.xml b/extensions/amazon-services/s3/runtime/pom.xml index 96bd3e4df1100..96d6f0c353dc0 100644 --- a/extensions/amazon-services/s3/runtime/pom.xml +++ b/extensions/amazon-services/s3/runtime/pom.xml @@ -34,6 +34,21 @@ software.amazon.awssdk s3 + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + software.amazon.awssdk diff --git a/extensions/amazon-services/ses/deployment/src/main/java/io/quarkus/amazon/ses/deployment/SesProcessor.java 
b/extensions/amazon-services/ses/deployment/src/main/java/io/quarkus/amazon/ses/deployment/SesProcessor.java index 5811f5e3a5306..198924ee1ae45 100644 --- a/extensions/amazon-services/ses/deployment/src/main/java/io/quarkus/amazon/ses/deployment/SesProcessor.java +++ b/extensions/amazon-services/ses/deployment/src/main/java/io/quarkus/amazon/ses/deployment/SesProcessor.java @@ -5,13 +5,17 @@ import org.jboss.jandex.DotName; import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; -import io.quarkus.amazon.common.deployment.AmazonClientTransportsBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.ses.runtime.SesBuildTimeConfig; import io.quarkus.amazon.ses.runtime.SesClientProducer; import io.quarkus.amazon.ses.runtime.SesConfig; @@ -75,26 +79,51 @@ void setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, buildTimeConfig.sdk, buildTimeConfig.syncClient); } - @BuildStep + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) @Record(ExecutionTime.RUNTIME_INIT) - void setupTransport(List amazonClients, SesRecorder recorder, - AmazonClientTransportRecorder transportRecorder, - SesConfig runtimeConfig, BuildProducer clientTransportBuildProducer) { + void setupApacheSyncTransport(List amazonClients, SesRecorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + SesConfig runtimeConfig, BuildProducer syncTransports) { - createTransportBuilders(amazonClients, + createApacheSyncTransportBuilder(amazonClients, transportRecorder, buildTimeConfig.syncClient, recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, SesRecorder recorder, + AmazonClientUrlConnectionTransportRecorder transportRecorder, + SesConfig runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, SesRecorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + SesConfig runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, recorder.getAsyncConfig(runtimeConfig), - clientTransportBuildProducer); + asyncTransports); } @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void createClientBuilders(List 
transportBuildItems, SesRecorder recorder, + void createClientBuilders(List syncTransports, + List asyncTransports, SesRecorder recorder, SesConfig runtimeConfig, BuildProducer builderProducer) { - createClientBuilders(transportBuildItems, builderProducer, + createClientBuilders(syncTransports, asyncTransports, builderProducer, (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); } diff --git a/extensions/amazon-services/ses/runtime/pom.xml b/extensions/amazon-services/ses/runtime/pom.xml index f7f80fee7191b..8ba690cc0d18c 100644 --- a/extensions/amazon-services/ses/runtime/pom.xml +++ b/extensions/amazon-services/ses/runtime/pom.xml @@ -35,6 +35,21 @@ software.amazon.awssdk ses + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + software.amazon.awssdk diff --git a/extensions/amazon-services/sns/deployment/src/main/java/io/quarkus/amazon/sns/deployment/SnsProcessor.java b/extensions/amazon-services/sns/deployment/src/main/java/io/quarkus/amazon/sns/deployment/SnsProcessor.java index 00e237cb8b657..b4240b44c288d 100644 --- a/extensions/amazon-services/sns/deployment/src/main/java/io/quarkus/amazon/sns/deployment/SnsProcessor.java +++ b/extensions/amazon-services/sns/deployment/src/main/java/io/quarkus/amazon/sns/deployment/SnsProcessor.java @@ -5,13 +5,17 @@ import org.jboss.jandex.DotName; import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; -import io.quarkus.amazon.common.deployment.AmazonClientTransportsBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.sns.runtime.SnsBuildTimeConfig; import io.quarkus.amazon.sns.runtime.SnsClientProducer; import io.quarkus.amazon.sns.runtime.SnsConfig; @@ -76,26 +80,51 @@ void setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, buildTimeConfig.syncClient); } - @BuildStep + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) @Record(ExecutionTime.RUNTIME_INIT) - void setupTransport(List amazonClients, SnsRecorder recorder, - AmazonClientTransportRecorder transportRecorder, - SnsConfig runtimeConfig, BuildProducer clientTransportBuildProducer) { + void setupApacheSyncTransport(List amazonClients, SnsRecorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + SnsConfig runtimeConfig, BuildProducer syncTransports) { - createTransportBuilders(amazonClients, + createApacheSyncTransportBuilder(amazonClients, transportRecorder, buildTimeConfig.syncClient, recorder.getSyncConfig(runtimeConfig), + syncTransports); + } 
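The generic type parameters of the List and BuildProducer arguments in these processor hunks appear to have been stripped by formatting. Restored from the imports in the same files, the new transport steps read roughly as below (shown for the SNS step just above; the KMS, S3, SES and SQS variants differ only in their recorder and config types).

    @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class)
    @Record(ExecutionTime.RUNTIME_INIT)
    void setupApacheSyncTransport(List<AmazonClientBuildItem> amazonClients, SnsRecorder recorder,
            AmazonClientApacheTransportRecorder transportRecorder,
            SnsConfig runtimeConfig, BuildProducer<AmazonClientSyncTransportBuildItem> syncTransports) {

        createApacheSyncTransportBuilder(amazonClients,
                transportRecorder,
                buildTimeConfig.syncClient,
                recorder.getSyncConfig(runtimeConfig),
                syncTransports);
    }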
+ + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, SnsRecorder recorder, + AmazonClientUrlConnectionTransportRecorder transportRecorder, + SnsConfig runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, SnsRecorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + SnsConfig runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, recorder.getAsyncConfig(runtimeConfig), - clientTransportBuildProducer); + asyncTransports); } @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void createClientBuilders(List transportBuildItems, SnsRecorder recorder, + void createClientBuilders(List syncTransports, + List asyncTransports, SnsRecorder recorder, SnsConfig runtimeConfig, BuildProducer builderProducer) { - createClientBuilders(transportBuildItems, builderProducer, + createClientBuilders(syncTransports, asyncTransports, builderProducer, (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); } diff --git a/extensions/amazon-services/sns/runtime/pom.xml b/extensions/amazon-services/sns/runtime/pom.xml index ef899fd76fc2f..af02948433173 100644 --- a/extensions/amazon-services/sns/runtime/pom.xml +++ b/extensions/amazon-services/sns/runtime/pom.xml @@ -34,6 +34,21 @@ software.amazon.awssdk sns + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + software.amazon.awssdk diff --git a/extensions/amazon-services/sqs/deployment/src/main/java/io/quarkus/amazon/sqs/deployment/SqsProcessor.java b/extensions/amazon-services/sqs/deployment/src/main/java/io/quarkus/amazon/sqs/deployment/SqsProcessor.java index 6ba7663a0400b..b718a9239f458 100644 --- a/extensions/amazon-services/sqs/deployment/src/main/java/io/quarkus/amazon/sqs/deployment/SqsProcessor.java +++ b/extensions/amazon-services/sqs/deployment/src/main/java/io/quarkus/amazon/sqs/deployment/SqsProcessor.java @@ -5,13 +5,17 @@ import org.jboss.jandex.DotName; import io.quarkus.amazon.common.deployment.AbstractAmazonServiceProcessor; +import io.quarkus.amazon.common.deployment.AmazonClientAsyncTransportBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientBuilderConfiguredBuildItem; import io.quarkus.amazon.common.deployment.AmazonClientInterceptorsPathBuildItem; -import io.quarkus.amazon.common.deployment.AmazonClientTransportsBuildItem; +import io.quarkus.amazon.common.deployment.AmazonClientSyncTransportBuildItem; +import io.quarkus.amazon.common.deployment.AmazonHttpClients; +import io.quarkus.amazon.common.runtime.AmazonClientApacheTransportRecorder; +import io.quarkus.amazon.common.runtime.AmazonClientNettyTransportRecorder; import io.quarkus.amazon.common.runtime.AmazonClientRecorder; -import io.quarkus.amazon.common.runtime.AmazonClientTransportRecorder; +import 
io.quarkus.amazon.common.runtime.AmazonClientUrlConnectionTransportRecorder; import io.quarkus.amazon.sqs.runtime.SqsBuildTimeConfig; import io.quarkus.amazon.sqs.runtime.SqsClientProducer; import io.quarkus.amazon.sqs.runtime.SqsConfig; @@ -76,26 +80,51 @@ void setup(BeanRegistrationPhaseBuildItem beanRegistrationPhase, buildTimeConfig.syncClient); } - @BuildStep + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonApacheHttpServicePresent.class) @Record(ExecutionTime.RUNTIME_INIT) - void setupTransport(List amazonClients, SqsRecorder recorder, - AmazonClientTransportRecorder transportRecorder, - SqsConfig runtimeConfig, BuildProducer clientTransportBuildProducer) { + void setupApacheSyncTransport(List amazonClients, SqsRecorder recorder, + AmazonClientApacheTransportRecorder transportRecorder, + SqsConfig runtimeConfig, BuildProducer syncTransports) { - createTransportBuilders(amazonClients, + createApacheSyncTransportBuilder(amazonClients, transportRecorder, buildTimeConfig.syncClient, recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonUrlConnectionHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupUrlConnectionSyncTransport(List amazonClients, SqsRecorder recorder, + AmazonClientUrlConnectionTransportRecorder transportRecorder, + SqsConfig runtimeConfig, BuildProducer syncTransports) { + + createUrlConnectionSyncTransportBuilder(amazonClients, + transportRecorder, + buildTimeConfig.syncClient, + recorder.getSyncConfig(runtimeConfig), + syncTransports); + } + + @BuildStep(onlyIf = AmazonHttpClients.IsAmazonNettyHttpServicePresent.class) + @Record(ExecutionTime.RUNTIME_INIT) + void setupNettyAsyncTransport(List amazonClients, SqsRecorder recorder, + AmazonClientNettyTransportRecorder transportRecorder, + SqsConfig runtimeConfig, BuildProducer asyncTransports) { + + createNettyAsyncTransportBuilder(amazonClients, + transportRecorder, recorder.getAsyncConfig(runtimeConfig), - clientTransportBuildProducer); + asyncTransports); } @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - void createClientBuilders(List transportBuildItems, SqsRecorder recorder, + void createClientBuilders(List syncTransports, + List asyncTransports, SqsRecorder recorder, SqsConfig runtimeConfig, BuildProducer builderProducer) { - createClientBuilders(transportBuildItems, builderProducer, + createClientBuilders(syncTransports, asyncTransports, builderProducer, (syncTransport) -> recorder.createSyncBuilder(runtimeConfig, syncTransport), (asyncTransport) -> recorder.createAsyncBuilder(runtimeConfig, asyncTransport)); } diff --git a/extensions/amazon-services/sqs/runtime/pom.xml b/extensions/amazon-services/sqs/runtime/pom.xml index f1b7442edc1a8..2ae995235ef61 100644 --- a/extensions/amazon-services/sqs/runtime/pom.xml +++ b/extensions/amazon-services/sqs/runtime/pom.xml @@ -34,6 +34,21 @@ software.amazon.awssdk sqs + + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + url-connection-client + + + software.amazon.awssdk + apache-client + + software.amazon.awssdk diff --git a/extensions/arc/deployment/pom.xml b/extensions/arc/deployment/pom.xml index 2d39867a76e85..9d20499b6ff75 100644 --- a/extensions/arc/deployment/pom.xml +++ b/extensions/arc/deployment/pom.xml @@ -1,54 +1,65 @@ - - quarkus-arc-parent - io.quarkus - 999-SNAPSHOT - ../ - - 4.0.0 + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + + 
quarkus-arc-parent + io.quarkus + 999-SNAPSHOT + + 4.0.0 - quarkus-arc-deployment - Quarkus - ArC - Deployment + quarkus-arc-deployment + Quarkus - ArC - Deployment - - - io.quarkus - quarkus-core-deployment - - - io.quarkus - quarkus-arc - - - io.quarkus.arc - arc-processor - + + + io.quarkus + quarkus-core-deployment + + + io.quarkus + quarkus-vertx-http-dev-console-spi + + + io.quarkus + quarkus-arc + + + io.quarkus.arc + arc-processor + - - io.quarkus - quarkus-junit5-internal - test - + + io.quarkus + quarkus-junit5-internal + test + - + + + org.jboss.spec.javax.ejb + jboss-ejb-api_3.1_spec + 1.0.2.Final + test + - - - - maven-compiler-plugin - - - - io.quarkus - quarkus-extension-processor - ${project.version} - - - - - - + + + + + + maven-compiler-plugin + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AdditionalBeanBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AdditionalBeanBuildItem.java index 6766f2d6b5d24..f85b8c0b33d0d 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AdditionalBeanBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AdditionalBeanBuildItem.java @@ -12,10 +12,15 @@ import io.quarkus.builder.item.MultiBuildItem; /** - * This build item is used to specify one or more additional bean classes to be analyzed. + * This build item is used to specify one or more additional bean classes to be analyzed during bean discovery. *

    * By default, the resulting beans may be removed if they are considered unused and {@link ArcConfig#removeUnusedBeans} is - * enabled. + * enabled. You can change the default behavior by setting the {@link #removable} to {@code false} and via + * {@link Builder#setUnremovable()}. + *
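As a quick illustration of the builder options described here and in the default-scope paragraph that follows, an extension build step might register an additional bean as sketched below. The bean class name is a placeholder; DotNames.APPLICATION_SCOPED is the ArC processor constant for the scope annotation.

    import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
    import io.quarkus.arc.processor.DotNames;
    import io.quarkus.deployment.annotations.BuildStep;

    public class MyExtensionProcessor {

        @BuildStep
        AdditionalBeanBuildItem additionalBeans() {
            // org.acme.MyService gets @ApplicationScoped only if it declares no scope itself,
            // and it is excluded from unused-bean removal.
            return AdditionalBeanBuildItem.builder()
                    .addBeanClass("org.acme.MyService")
                    .setDefaultScope(DotNames.APPLICATION_SCOPED)
                    .setUnremovable()
                    .build();
        }
    }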

    + * An additional bean may have the scope defaulted via {@link #defaultScope} and {@link Builder#setDefaultScope(DotName)}. The + * default scope is only used if there is no scope declared on the bean class. The default scope should be used in cases where a + * bean class source is not controlled by the extension and the scope annotation cannot be declared directly on the class. */ public final class AdditionalBeanBuildItem extends MultiBuildItem { @@ -122,6 +127,15 @@ public Builder setUnremovable() { return this; } + /** + * The default scope is only used if there is no scope declared on the bean class. + *

    + * The default scope should be used in cases where a bean class source is not controlled by the extension and the + * scope annotation cannot be declared directly on the class. + * + * @param defaultScope + * @return self + */ public Builder setDefaultScope(DotName defaultScope) { this.defaultScope = defaultScope; return this; diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java index 499f6b7024f2b..6ad7fb53dbf23 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java @@ -12,6 +12,8 @@ import io.quarkus.arc.config.ConfigProperties; import io.quarkus.deployment.index.IndexDependencyConfig; +import io.quarkus.runtime.annotations.ConfigDocMapKey; +import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; @@ -42,7 +44,7 @@ public class ArcConfig { *

  • does not declare any producer which is eligible for injection to any injection point,
  • is not directly eligible for injection into any {@link javax.enterprise.inject.Instance} injection point
- * + * * @see UnremovableBeanBuildItem */ @ConfigItem(defaultValue = "all") @@ -96,7 +98,7 @@ public class ArcConfig { public Optional> selectedAlternatives; /** - * If set to true then {@code javax.enterprise.inject.Produces} is automatically added to all methods that are + * If set to true then {@code javax.enterprise.inject.Produces} is automatically added to all non-void methods that are * annotated with a scope annotation, a stereotype or a qualifier, and are not annotated with {@code Inject} or * {@code Produces}, and no parameter is annotated with {@code Disposes}, {@code Observes} or {@code ObservesAsync}. */ @@ -140,12 +142,47 @@ public class ArcConfig { public Optional> unremovableTypes; /** - * The artifacts that should be excluded from discovery. These artifacts would be otherwise scanned for beans, i.e. they + * Artifacts that should be excluded from discovery. + *

+ * These artifacts would be otherwise scanned for beans, i.e. they * contain a Jandex index or a beans.xml descriptor. */ @ConfigItem + @ConfigDocSection + @ConfigDocMapKey("dependency-name") Map excludeDependency; + /** + * If set to true then the container attempts to detect "unused removed beans" false positives during programmatic lookup at + * runtime. You can disable this feature to conserve some memory when running your application in production. + * + * @see ArcConfig#removeUnusedBeans + */ + @ConfigItem(defaultValue = "true") + public boolean detectUnusedFalsePositives; + + /** + * If set to true then the container attempts to detect usage of wrong annotations. + *

+ * A wrong annotation may lead to unexpected behavior in a Quarkus application. A typical example is + * {@code @javax.ejb.Singleton} which is often confused with {@code @javax.inject.Singleton}. As a result a component + * annotated with {@code @javax.ejb.Singleton} can be completely ignored. + */ + @ConfigItem(defaultValue = "true") + public boolean detectWrongAnnotations; + + /** + * Dev mode configuration. + */ + @ConfigItem + public ArcDevModeConfig devMode; + + /** + * Test mode configuration. + */ + @ConfigItem + public ArcTestConfig test; + public final boolean isRemoveUnusedBeansFieldValid() { return ALLOWED_REMOVE_UNUSED_BEANS_VALUES.contains(removeUnusedBeans.toLowerCase()); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java new file mode 100644 index 0000000000000..18ac7917643ca --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java @@ -0,0 +1,15 @@ +package io.quarkus.arc.deployment; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class ArcDevModeConfig { + + /** + * If set to true then the container monitors business method invocations and fired events during the development mode. + */ + @ConfigItem(defaultValue = "true") + public boolean monitoringEnabled; + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java index c060653781f48..dad024319f59f 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java @@ -33,19 +33,23 @@ import io.quarkus.arc.deployment.ObserverRegistrationPhaseBuildItem.ObserverConfiguratorBuildItem; import io.quarkus.arc.deployment.UnremovableBeanBuildItem.BeanClassAnnotationExclusion; import io.quarkus.arc.deployment.UnremovableBeanBuildItem.BeanClassNameExclusion; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem.BeanTypeExclusion; import io.quarkus.arc.deployment.ValidationPhaseBuildItem.ValidationErrorBuildItem; import io.quarkus.arc.processor.AlternativePriorities; import io.quarkus.arc.processor.AnnotationsTransformer; import io.quarkus.arc.processor.BeanConfigurator; import io.quarkus.arc.processor.BeanDefiningAnnotation; import io.quarkus.arc.processor.BeanDeployment; +import io.quarkus.arc.processor.BeanDeploymentValidator; import io.quarkus.arc.processor.BeanInfo; import io.quarkus.arc.processor.BeanProcessor; +import io.quarkus.arc.processor.BeanRegistrar; import io.quarkus.arc.processor.BytecodeTransformer; import io.quarkus.arc.processor.ContextConfigurator; import io.quarkus.arc.processor.ContextRegistrar; import io.quarkus.arc.processor.DotNames; import io.quarkus.arc.processor.ObserverConfigurator; +import io.quarkus.arc.processor.ObserverRegistrar; import io.quarkus.arc.processor.ReflectionRegistration; import io.quarkus.arc.processor.ResourceOutput; import io.quarkus.arc.processor.StereotypeInfo; @@ -53,17 +57,20 @@ import io.quarkus.arc.runtime.ArcRecorder; import io.quarkus.arc.runtime.BeanContainer; import io.quarkus.arc.runtime.LaunchModeProducer; -import io.quarkus.arc.runtime.LifecycleEventRunner; +import io.quarkus.arc.runtime.LoggerProducer; +import 
io.quarkus.arc.runtime.test.PreloadedTestApplicationClassPredicate; import io.quarkus.bootstrap.BootstrapDebug; import io.quarkus.deployment.Capabilities; import io.quarkus.deployment.Capability; import io.quarkus.deployment.Feature; +import io.quarkus.deployment.IsTest; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.AdditionalApplicationArchiveMarkerBuildItem; -import io.quarkus.deployment.builditem.ApplicationArchivesBuildItem; import io.quarkus.deployment.builditem.ApplicationClassPredicateBuildItem; +import io.quarkus.deployment.builditem.ApplicationIndexBuildItem; import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem; import io.quarkus.deployment.builditem.CapabilityBuildItem; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; @@ -81,6 +88,7 @@ import io.quarkus.runtime.LaunchMode; import io.quarkus.runtime.QuarkusApplication; import io.quarkus.runtime.annotations.QuarkusMain; +import io.quarkus.runtime.test.TestApplicationClassPredicate; /** * This class contains build steps that trigger various phases of the bean processing. @@ -90,6 +98,7 @@ *

    *
  1. {@link ContextRegistrationPhaseBuildItem}
  2. {@link BeanRegistrationPhaseBuildItem}
  3. + {@link ObserverRegistrationPhaseBuildItem}
  4. {@link ValidationPhaseBuildItem}
* These build items are especially useful if an extension needs to produce other build items within the given phase. @@ -122,48 +131,63 @@ AdditionalBeanBuildItem quarkusApplication(CombinedIndexBuildItem combinedIndexB .build(); } - // PHASE 1 - build BeanProcessor, register custom contexts + // PHASE 1 - build BeanProcessor @BuildStep public ContextRegistrationPhaseBuildItem initialize( ArcConfig arcConfig, BeanArchiveIndexBuildItem beanArchiveIndex, - ApplicationArchivesBuildItem applicationArchivesBuildItem, + CombinedIndexBuildItem combinedIndex, + ApplicationIndexBuildItem applicationIndex, List annotationTransformers, List injectionPointTransformers, List observerTransformers, - List interceptorBindingRegistrarBuildItems, + List interceptorBindingRegistrars, + List qualifierRegistrars, List additionalStereotypeBuildItems, List applicationClassPredicates, List additionalBeans, - List beanRegistrars, - List observerRegistrars, - List contextRegistrars, - List beanDeploymentValidators, List resourceAnnotations, List additionalBeanDefiningAnnotations, - List removalExclusions, Optional testClassPredicate, Capabilities capabilities, - CustomScopeAnnotationsBuildItem scopes, - LaunchModeBuildItem launchModeBuildItem) { + CustomScopeAnnotationsBuildItem customScopes, + LaunchModeBuildItem launchModeBuildItem, + BuildProducer applicationClassPredicateProducer) { if (!arcConfig.isRemoveUnusedBeansFieldValid()) { throw new IllegalArgumentException("Invalid configuration value set for 'quarkus.arc.remove-unused-beans'." + " Please use one of " + ArcConfig.ALLOWED_REMOVE_UNUSED_BEANS_VALUES); } - List additionalBeansTypes = beanArchiveIndex.getAdditionalBeans(); + // bean type -> default scope (may be null) + Map additionalBeanTypes = new HashMap<>(); + for (AdditionalBeanBuildItem additionalBean : additionalBeans) { + DotName defaultScope = additionalBean.getDefaultScope(); + for (String beanClass : additionalBean.getBeanClasses()) { + DotName existingDefaultScope = additionalBeanTypes.get(beanClass); + if (existingDefaultScope != null && defaultScope != null && !existingDefaultScope.equals(defaultScope)) { + throw new IllegalStateException("Different default scopes defined for additional bean class: " + beanClass + + "\n\t - scopes: " + defaultScope + " and " + + existingDefaultScope); + } + additionalBeanTypes.put(beanClass, defaultScope); + } + } + Set generatedClassNames = beanArchiveIndex.getGeneratedClassNames(); IndexView index = beanArchiveIndex.getIndex(); BeanProcessor.Builder builder = BeanProcessor.builder(); - IndexView applicationClassesIndex = applicationArchivesBuildItem.getRootArchive().getIndex(); - builder.setApplicationClassPredicate(new AbstractCompositeApplicationClassesPredicate( + IndexView applicationClassesIndex = applicationIndex.getIndex(); + Predicate applicationClassPredicate = new AbstractCompositeApplicationClassesPredicate( applicationClassesIndex, generatedClassNames, applicationClassPredicates, testClassPredicate) { @Override protected DotName getDotName(DotName dotName) { return dotName; } - }); + }; + applicationClassPredicateProducer.produce(new CompletedApplicationClassPredicateBuildItem(applicationClassPredicate)); + builder.setApplicationClassPredicate(applicationClassPredicate); + builder.addAnnotationTransformer(new AnnotationsTransformer() { @Override @@ -175,25 +199,29 @@ public boolean appliesTo(AnnotationTarget.Kind kind) { public void transform(TransformationContext transformationContext) { ClassInfo beanClass = 
transformationContext.getTarget().asClass(); String beanClassName = beanClass.name().toString(); - if (additionalBeansTypes.contains(beanClassName)) { - if (scopes.isScopeDeclaredOn(beanClass)) { - // If it declares a built-in scope no action is needed - return; - } - // Try to determine the default scope - DotName defaultScope = additionalBeans.stream() - .filter(ab -> ab.contains(beanClassName)).findFirst().map(AdditionalBeanBuildItem::getDefaultScope) - .orElse(null); - if (defaultScope == null && !beanClass.annotations().containsKey(ADDITIONAL_BEAN)) { - // Add special stereotype so that @Dependent is automatically used even if no scope is declared + if (!additionalBeanTypes.containsKey(beanClassName)) { + // Not an additional bean type + return; + } + if (customScopes.isScopeDeclaredOn(beanClass)) { + // If it declares a scope no action is needed + return; + } + DotName defaultScope = additionalBeanTypes.get(beanClassName); + if (defaultScope != null) { + transformationContext.transform().add(defaultScope).done(); + } else { + if (!beanClass.annotations().containsKey(ADDITIONAL_BEAN)) { + // Add special stereotype is added so that @Dependent is automatically used even if no scope is declared + // Otherwise the bean class would be ignored during bean discovery transformationContext.transform().add(ADDITIONAL_BEAN).done(); - } else { - transformationContext.transform().add(defaultScope).done(); } } } }); - builder.setIndex(index); + + builder.setBeanArchiveIndex(index); + builder.setApplicationIndex(combinedIndex.getIndex()); List beanDefiningAnnotations = additionalBeanDefiningAnnotations.stream() .map((s) -> new BeanDefiningAnnotation(s.getName(), s.getDefaultScope())).collect(Collectors.toList()); beanDefiningAnnotations.add(new BeanDefiningAnnotation(ADDITIONAL_BEAN, null)); @@ -219,20 +247,12 @@ public void transform(TransformationContext transformationContext) { builder.addObserverTransformer(transformer.getInstance()); } // register additional interceptor bindings - for (InterceptorBindingRegistrarBuildItem bindingRegistrar : interceptorBindingRegistrarBuildItems) { - builder.addInterceptorbindingRegistrar(bindingRegistrar.getInterceptorBindingRegistrar()); - } - for (BeanRegistrarBuildItem item : beanRegistrars) { - builder.addBeanRegistrar(item.getBeanRegistrar()); - } - for (ObserverRegistrarBuildItem item : observerRegistrars) { - builder.addObserverRegistrar(item.getObserverRegistrar()); - } - for (ContextRegistrarBuildItem item : contextRegistrars) { - builder.addContextRegistrar(item.getContextRegistrar()); + for (InterceptorBindingRegistrarBuildItem registrar : interceptorBindingRegistrars) { + builder.addInterceptorBindingRegistrar(registrar.getInterceptorBindingRegistrar()); } - for (BeanDeploymentValidatorBuildItem item : beanDeploymentValidators) { - builder.addBeanDeploymentValidator(item.getBeanDeploymentValidator()); + // register additional qualifiers + for (QualifierRegistrarBuildItem registrar : qualifierRegistrars) { + builder.addQualifierRegistrar(registrar.getQualifierRegistrar()); } builder.setRemoveUnusedBeans(arcConfig.shouldEnableBeanRemoval()); if (arcConfig.shouldOnlyKeepAppBeans()) { @@ -244,7 +264,7 @@ protected DotName getDotName(BeanInfo bean) { } }); } - builder.addRemovalExclusion(new BeanClassNameExclusion(LifecycleEventRunner.class.getName())); + builder.addRemovalExclusion(new BeanTypeExclusion(DotName.createSimple(TestApplicationClassPredicate.class.getName()))); for (AdditionalBeanBuildItem additionalBean : additionalBeans) { if 
(!additionalBean.isRemovable()) { for (String beanClass : additionalBean.getBeanClasses()) { @@ -257,9 +277,6 @@ protected DotName getDotName(BeanInfo bean) { builder.addRemovalExclusion(new BeanClassAnnotationExclusion(annotation.getName())); } } - for (UnremovableBeanBuildItem exclusion : removalExclusions) { - builder.addRemovalExclusion(exclusion.getPredicate()); - } // unremovable beans specified in application.properties if (arcConfig.unremovableTypes.isPresent()) { List> classPredicates = initClassPredicates(arcConfig.unremovableTypes.get()); @@ -345,6 +362,7 @@ public Integer compute(AnnotationTarget target, Collection stere public BeanRegistrationPhaseBuildItem registerBeans(ContextRegistrationPhaseBuildItem contextRegistrationPhase, List contextConfigurators, BuildProducer interceptorResolver, + BuildProducer beanDiscoveryFinished, BuildProducer transformedAnnotations) { for (ContextConfiguratorBuildItem contextConfigurator : contextConfigurators) { @@ -353,13 +371,15 @@ public BeanRegistrationPhaseBuildItem registerBeans(ContextRegistrationPhaseBuil value.done(); } } - - BeanDeployment beanDeployment = contextRegistrationPhase.getBeanProcessor().getBeanDeployment(); + BeanProcessor beanProcessor = contextRegistrationPhase.getBeanProcessor(); + beanProcessor.registerScopes(); + BeanRegistrar.RegistrationContext registrationContext = beanProcessor.registerBeans(); + BeanDeployment beanDeployment = beanProcessor.getBeanDeployment(); interceptorResolver.produce(new InterceptorResolverBuildItem(beanDeployment)); + beanDiscoveryFinished.produce(new BeanDiscoveryFinishedBuildItem(beanDeployment)); transformedAnnotations.produce(new TransformedAnnotationsBuildItem(beanDeployment)); - return new BeanRegistrationPhaseBuildItem(contextRegistrationPhase.getBeanProcessor().registerBeans(), - contextRegistrationPhase.getBeanProcessor()); + return new BeanRegistrationPhaseBuildItem(registrationContext, beanProcessor); } // PHASE 3 - register synthetic observers @@ -372,31 +392,41 @@ public ObserverRegistrationPhaseBuildItem registerSyntheticObservers(BeanRegistr configurator.getValues().forEach(BeanConfigurator::done); } - return new ObserverRegistrationPhaseBuildItem(beanRegistrationPhase.getBeanProcessor().registerSyntheticObservers(), - beanRegistrationPhase.getBeanProcessor()); + BeanProcessor beanProcessor = beanRegistrationPhase.getBeanProcessor(); + ObserverRegistrar.RegistrationContext registrationContext = beanProcessor.registerSyntheticObservers(); + + return new ObserverRegistrationPhaseBuildItem(registrationContext, beanProcessor); } // PHASE 4 - initialize and validate the bean deployment @BuildStep public ValidationPhaseBuildItem validate(ObserverRegistrationPhaseBuildItem observerRegistrationPhase, List observerConfigurators, - BuildProducer bytecodeTransformer) { + List unremovableBeans, + BuildProducer bytecodeTransformer, + BuildProducer synthesisFinished) { for (ObserverConfiguratorBuildItem configurator : observerConfigurators) { // Just make sure the configurator is processed configurator.getValues().forEach(ObserverConfigurator::done); } + BeanProcessor beanProcessor = observerRegistrationPhase.getBeanProcessor(); + synthesisFinished.produce(new SynthesisFinishedBuildItem(beanProcessor.getBeanDeployment())); + Consumer bytecodeTransformerConsumer = new BytecodeTransformerConsumer(bytecodeTransformer); - observerRegistrationPhase.getBeanProcessor().initialize(bytecodeTransformerConsumer); - return new 
ValidationPhaseBuildItem(observerRegistrationPhase.getBeanProcessor().validate(bytecodeTransformerConsumer), - observerRegistrationPhase.getBeanProcessor()); + + beanProcessor.initialize(bytecodeTransformerConsumer, + unremovableBeans.stream().map(UnremovableBeanBuildItem::getPredicate).collect(Collectors.toList())); + BeanDeploymentValidator.ValidationContext validationContext = beanProcessor.validate(bytecodeTransformerConsumer); + + return new ValidationPhaseBuildItem(validationContext, beanProcessor); } // PHASE 5 - generate resources and initialize the container @BuildStep @Record(STATIC_INIT) - public BeanContainerBuildItem generateResources(ArcRecorder recorder, ShutdownContextBuildItem shutdown, + public BeanContainerBuildItem generateResources(ArcConfig config, ArcRecorder recorder, ShutdownContextBuildItem shutdown, ValidationPhaseBuildItem validationPhase, List validationErrors, List beanContainerListenerBuildItems, @@ -435,7 +465,8 @@ public void registerMethod(MethodInfo methodInfo) { public void registerField(FieldInfo fieldInfo) { reflectiveFields.produce(new ReflectiveFieldBuildItem(fieldInfo)); } - }, existingClasses.existingClasses, bytecodeTransformerConsumer); + }, existingClasses.existingClasses, bytecodeTransformerConsumer, + config.shouldEnableBeanRemoval() && config.detectUnusedFalsePositives); for (ResourceOutput.Resource resource : resources) { switch (resource.getType()) { case JAVA_CLASS: @@ -465,13 +496,30 @@ public void registerField(FieldInfo fieldInfo) { ArcContainer container = recorder.getContainer(shutdown); BeanContainer beanContainer = recorder.initBeanContainer(container, beanContainerListenerBuildItems.stream().map(BeanContainerListenerBuildItem::getBeanContainerListener) - .collect(Collectors.toList()), - beanProcessor.getBeanDeployment().getRemovedBeans().stream().flatMap(b -> b.getTypes().stream()) - .map(t -> t.name().toString()) - .collect(Collectors.toSet())); + .collect(Collectors.toList())); return new BeanContainerBuildItem(beanContainer); + } + @BuildStep(onlyIf = IsTest.class) + public AdditionalBeanBuildItem testApplicationClassPredicateBean() { + // We need to register the bean implementation for TestApplicationClassPredicate + // TestApplicationClassPredicate is used programatically in the ArC recorder when StartupEvent is fired + return AdditionalBeanBuildItem.unremovableOf(PreloadedTestApplicationClassPredicate.class); + } + + @BuildStep(onlyIf = IsTest.class) + @Record(ExecutionTime.STATIC_INIT) + void initTestApplicationClassPredicateBean(ArcRecorder recorder, BeanContainerBuildItem beanContainer, + BeanDiscoveryFinishedBuildItem beanDiscoveryFinished, + CompletedApplicationClassPredicateBuildItem predicate) { + Set applicationBeanClasses = new HashSet<>(); + for (BeanInfo bean : beanDiscoveryFinished.beanStream().classBeans()) { + if (predicate.test(bean.getBeanClass())) { + applicationBeanClasses.add(bean.getBeanClass().toString()); + } + } + recorder.initTestApplicationClassPredicate(applicationBeanClasses); } @BuildStep @@ -492,10 +540,15 @@ AdditionalBeanBuildItem launchMode() { } @BuildStep - CustomScopeAnnotationsBuildItem exposeCustomScopeNames(List contextBuildItems) { + AdditionalBeanBuildItem loggerProducer() { + return new AdditionalBeanBuildItem(LoggerProducer.class); + } + + @BuildStep + CustomScopeAnnotationsBuildItem exposeCustomScopeNames(List customScopes) { Set names = new HashSet<>(); - for (ContextRegistrarBuildItem item : contextBuildItems) { - names.addAll(item.getAnnotationNames()); + for (CustomScopeBuildItem 
customScope : customScopes) { + names.add(customScope.getAnnotationName()); } return new CustomScopeAnnotationsBuildItem(names); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java new file mode 100644 index 0000000000000..3973f0c061e57 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java @@ -0,0 +1,16 @@ +package io.quarkus.arc.deployment; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class ArcTestConfig { + + /** + * If set to true then disable {@code StartupEvent} and {@code ShutdownEvent} observers declared on application bean classes + * during the tests. + */ + @ConfigItem(defaultValue = "false") + public boolean disableApplicationLifecycleObservers; + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeBuildItem.java new file mode 100644 index 0000000000000..d0c586447bc56 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeBuildItem.java @@ -0,0 +1,242 @@ +package io.quarkus.arc.deployment; + +import java.util.Collection; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; +import org.jboss.jandex.IndexView; + +import io.quarkus.arc.processor.Annotations; +import io.quarkus.arc.processor.BuiltinScope; +import io.quarkus.arc.processor.DotNames; +import io.quarkus.builder.item.MultiBuildItem; + +/** + * This build item can be used to turn a class that is not annotated with a CDI scope annotation into a bean, i.e. the default + * scope annotation is added automatically if conditions are met. + */ +public final class AutoAddScopeBuildItem extends MultiBuildItem { + + public static Builder builder() { + return new Builder(); + } + + private final MatchPredicate matchPredicate; + private final boolean containerServicesRequired; + private final DotName defaultScope; + private final boolean unremovable; + private final String reason; + + private AutoAddScopeBuildItem(MatchPredicate matchPredicate, boolean containerServicesRequired, + DotName defaultScope, boolean unremovable, String reason) { + this.matchPredicate = matchPredicate; + this.containerServicesRequired = containerServicesRequired; + this.defaultScope = defaultScope; + this.unremovable = unremovable; + this.reason = reason; + } + + public boolean isContainerServicesRequired() { + return containerServicesRequired; + } + + public DotName getDefaultScope() { + return defaultScope; + } + + public boolean isUnremovable() { + return unremovable; + } + + public String getReason() { + return reason != null ? 
": " + reason : ""; + } + + public boolean test(ClassInfo clazz, Collection annotations, IndexView index) { + return matchPredicate.test(clazz, annotations, index); + } + + public interface MatchPredicate { + + /** + * @param clazz + * @param annotations + * @param index + * @return {@code true} if the input arguments match the predicate, + * {@code false} otherwise + */ + boolean test(ClassInfo clazz, Collection annotations, IndexView index); + + default MatchPredicate and(MatchPredicate other) { + return new MatchPredicate() { + @Override + public boolean test(ClassInfo clazz, Collection annotations, IndexView index) { + return test(clazz, annotations, index) && other.test(clazz, annotations, index); + } + }; + } + + } + + public static class Builder { + + private MatchPredicate matchPredicate; + private boolean requiresContainerServices; + private DotName defaultScope; + private boolean unremovable; + private String reason; + + private Builder() { + this.defaultScope = BuiltinScope.DEPENDENT.getName(); + this.unremovable = false; + this.requiresContainerServices = false; + } + + /** + * At least one injection point or lifecycle callback must be declared in the class hierarchy. Otherwise, the scope + * annotation is not added. + *
+         * Note that the detection algorithm is best-effort only. Some inheritance rules defined by the spec are not
+         * followed, e.g. per spec an initializer method is only inherited if not overridden. This method merely scans the
+         * annotations.
+         *
+         * @return self
+         */
+        public Builder requiresContainerServices() {
+            this.requiresContainerServices = true;
+            return this;
+        }
+
+        /**
+         * The bean will be unremovable.
+         *
+         * @see ArcConfig#removeUnusedBeans
+         * @return self
+         */
+        public Builder unremovable() {
+            this.unremovable = true;
+            return this;
+        }
+
+        /**
+         * Set a custom predicate.
+         *
+         * @param predicate
+         * @return self
+         */
+        public Builder match(MatchPredicate predicate) {
+            this.matchPredicate = predicate;
+            return this;
+        }
+
+        /**
+         * The class must be annotated with the given annotation. Otherwise, the scope annotation is not added.
+         * <p>
+         * The final predicate is a short-circuiting logical AND of the previous predicate (if any) and this condition.
+         *
+         * @param annotationName
+         * @return self
+         */
+        public Builder isAnnotatedWith(DotName annotationName) {
+            return and((clazz, annotations, index) -> Annotations.contains(annotations, annotationName));
+        }
+
+        /**
+         * The class or any of its elements must be annotated with the given annotation. Otherwise, the scope annotation is
+         * not added.
+         * <p>
+ * The final predicate is a short-circuiting logical AND of the previous predicate (if any) and this condition. + * + * + * @param annotationNames + * @return self + */ + public Builder containsAnnotations(DotName... annotationNames) { + return and((clazz, annotations, index) -> { + for (DotName annotation : annotationNames) { + if (clazz.annotations().containsKey(annotation)) { + return true; + } + } + return false; + }); + } + + /** + * The class must directly or indirectly implement the given interface. + *
<p>
+ * The final predicate is a short-circuiting logical AND of the previous predicate (if any) and this condition. + * + * @param interfaceName + * @return self + */ + public Builder implementsInterface(DotName interfaceName) { + return and((clazz, annotations, index) -> { + if (clazz.interfaceNames().contains(interfaceName)) { + return true; + } + DotName superName = clazz.superName(); + while (superName != null && !superName.equals(DotNames.OBJECT)) { + ClassInfo superClass = index.getClassByName(superName); + if (superClass != null) { + if (superClass.interfaceNames().contains(interfaceName)) { + return true; + } + superName = superClass.superName(); + } + } + return false; + }); + } + + /** + * The scope annotation added to the class. + * + * @param scopeAnnotationName + * @return self + */ + public Builder defaultScope(DotName scopeAnnotationName) { + this.defaultScope = scopeAnnotationName; + return this; + } + + /** + * The scope annotation added to the class. + * + * @param scope + * @return + */ + public Builder defaultScope(BuiltinScope scope) { + return defaultScope(scope.getName()); + } + + /** + * Specify an optional reason description that is used in log messages. + * + * @param reason + * @return the reason why the scope annotation was added + */ + public Builder reason(String reason) { + this.reason = reason; + return this; + } + + private Builder and(MatchPredicate other) { + if (matchPredicate == null) { + matchPredicate = other; + } else { + matchPredicate = matchPredicate.and(other); + } + return this; + } + + public AutoAddScopeBuildItem build() { + if (matchPredicate == null) { + throw new IllegalStateException("A matching predicate must be set!"); + } + return new AutoAddScopeBuildItem(matchPredicate, requiresContainerServices, defaultScope, unremovable, reason); + } + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeProcessor.java new file mode 100644 index 0000000000000..eb6eaf51c2538 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeProcessor.java @@ -0,0 +1,131 @@ +package io.quarkus.arc.deployment; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.jboss.jandex.AnnotationTarget.Kind; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; +import org.jboss.jandex.IndexView; +import org.jboss.logging.Logger; + +import io.quarkus.arc.processor.AnnotationsTransformer; +import io.quarkus.arc.processor.BeanInfo; +import io.quarkus.arc.processor.DotNames; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; + +public class AutoAddScopeProcessor { + + private static final Logger LOGGER = Logger.getLogger(AutoAddScopeProcessor.class); + + @BuildStep + void annotationTransformer(List autoScopes, CustomScopeAnnotationsBuildItem scopes, + List autoInjectAnnotations, + BuildProducer annotationsTransformers, + BuildProducer unremovableBeans, + BeanArchiveIndexBuildItem beanArchiveIndex) throws Exception { + if (autoScopes.isEmpty()) { + return; + } + Set containerAnnotationNames = autoInjectAnnotations.stream().flatMap(a -> a.getAnnotationNames().stream()) + .collect(Collectors.toSet()); + containerAnnotationNames.add(DotNames.POST_CONSTRUCT); + containerAnnotationNames.add(DotNames.PRE_DESTROY); + 
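// Usage sketch (not part of the surrounding build step): another extension could produce the new
// AutoAddScopeBuildItem roughly like this; the org.acme.MyBinding annotation, the chosen scope and
// the reason text are made-up placeholders.
@BuildStep
AutoAddScopeBuildItem autoAddScope() {
    // turn classes annotated with the hypothetical @MyBinding into singleton beans
    return AutoAddScopeBuildItem.builder()
            .containsAnnotations(DotName.createSimple("org.acme.MyBinding"))
            .defaultScope(BuiltinScope.SINGLETON)
            .unremovable()
            .reason("Class is annotated with @MyBinding")
            .build();
}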
containerAnnotationNames.add(DotNames.INJECT); + + Set unremovables = new HashSet<>(); + + annotationsTransformers.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { + + @Override + public boolean appliesTo(Kind kind) { + return kind == Kind.CLASS; + } + + @Override + public void transform(TransformationContext context) { + if (scopes.isScopeIn(context.getAnnotations())) { + // Skip classes annotated with a scope + return; + } + ClassInfo clazz = context.getTarget().asClass(); + Boolean requiresContainerServices = null; + + for (AutoAddScopeBuildItem autoScope : autoScopes) { + if (autoScope.isContainerServicesRequired()) { + if (requiresContainerServices == null) { + requiresContainerServices = requiresContainerServices(clazz, containerAnnotationNames, + beanArchiveIndex.getIndex()); + } + if (!requiresContainerServices) { + // Skip - no injection point detected + continue; + } + } + if (autoScope.test(clazz, context.getAnnotations(), beanArchiveIndex.getIndex())) { + context.transform().add(autoScope.getDefaultScope()).done(); + if (autoScope.isUnremovable()) { + unremovables.add(clazz.name()); + } + LOGGER.debugf("Automatically added scope %s to class %s" + autoScope.getReason(), + autoScope.getDefaultScope(), clazz, autoScope.getReason()); + break; + } + } + } + })); + + if (!unremovables.isEmpty()) { + unremovableBeans.produce(new UnremovableBeanBuildItem(new Predicate() { + + @Override + public boolean test(BeanInfo bean) { + return bean.isClassBean() && unremovables.contains(bean.getBeanClass()); + } + })); + } + } + + private boolean requiresContainerServices(ClassInfo clazz, Set containerAnnotationNames, IndexView index) { + // Note that transformed methods/fields are not taken into account + if (hasContainerAnnotation(clazz, containerAnnotationNames)) { + return true; + } + if (index != null) { + DotName superName = clazz.superName(); + while (superName != null && !superName.equals(DotNames.OBJECT)) { + final ClassInfo superClass = index.getClassByName(superName); + if (superClass != null) { + if (hasContainerAnnotation(superClass, containerAnnotationNames)) { + return true; + } + superName = superClass.superName(); + } else { + superName = null; + } + } + } + return false; + } + + private boolean hasContainerAnnotation(ClassInfo clazz, Set containerAnnotationNames) { + if (clazz.annotations().isEmpty() || containerAnnotationNames.isEmpty()) { + return false; + } + return containsAny(clazz, containerAnnotationNames); + } + + private boolean containsAny(ClassInfo clazz, Set annotationNames) { + for (DotName annotation : clazz.annotations().keySet()) { + if (annotationNames.contains(annotation)) { + return true; + } + } + return false; + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java index 548e048081cbd..861dd0206506f 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java @@ -24,11 +24,15 @@ public class AutoInjectFieldProcessor { @BuildStep void autoInjectQualifiers(BeanArchiveIndexBuildItem beanArchiveIndex, - BuildProducer autoInjectAnnotations) { + BuildProducer autoInjectAnnotations, + List qualifierRegistrars) { List qualifiers = new ArrayList<>(); for (AnnotationInstance qualifier : 
beanArchiveIndex.getIndex().getAnnotations(DotNames.QUALIFIER)) { qualifiers.add(qualifier.target().asClass().name()); } + for (QualifierRegistrarBuildItem registrar : qualifierRegistrars) { + qualifiers.addAll(registrar.getQualifierRegistrar().getAdditionalQualifiers().keySet()); + } autoInjectAnnotations.produce(new AutoInjectAnnotationBuildItem(qualifiers)); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java index 45808cfe3741d..759f627fa64a2 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java @@ -26,7 +26,7 @@ public class AutoProducerMethodsProcessor { private static final Logger LOGGER = Logger.getLogger(AutoProducerMethodsProcessor.class); /** - * Register an annotation transformer that automatically adds {@link Produces} to all non-static methods that are annotated + * Register an annotation transformer that automatically adds {@link Produces} to all non-void methods that are annotated * with a qualifier or a scope annotation. */ @BuildStep @@ -62,6 +62,10 @@ public int getPriority() { @Override public void transform(TransformationContext ctx) { + if (ctx.getTarget().asMethod().returnType().kind() == org.jboss.jandex.Type.Kind.VOID) { + // Skip void methods + return; + } Set methodAnnotations = Annotations.getAnnotations(Kind.METHOD, ctx.getAnnotations()); if (methodAnnotations.isEmpty() || contains(methodAnnotations, DotNames.PRODUCES) || contains(methodAnnotations, DotNames.INJECT)) { diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveIndexBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveIndexBuildItem.java index 8b742ad5a7b00..314475a88c8df 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveIndexBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveIndexBuildItem.java @@ -1,6 +1,5 @@ package io.quarkus.arc.deployment; -import java.util.List; import java.util.Set; import org.jboss.jandex.DotName; @@ -25,13 +24,10 @@ public final class BeanArchiveIndexBuildItem extends SimpleBuildItem { private final IndexView index; private final Set generatedClassNames; - private final List additionalBeans; - public BeanArchiveIndexBuildItem(IndexView index, Set generatedClassNames, - List additionalBeans) { + public BeanArchiveIndexBuildItem(IndexView index, Set generatedClassNames) { this.index = index; this.generatedClassNames = generatedClassNames; - this.additionalBeans = additionalBeans; } public IndexView getIndex() { @@ -42,8 +38,4 @@ public Set getGeneratedClassNames() { return generatedClassNames; } - public List getAdditionalBeans() { - return additionalBeans; - } - } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java index 9a1f71d0a84b7..75847771074b4 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java @@ -7,8 +7,6 @@ import java.util.Set; import java.util.stream.Collectors; -import 
javax.inject.Inject; - import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationTarget.Kind; import org.jboss.jandex.CompositeIndex; @@ -22,7 +20,6 @@ import io.quarkus.arc.processor.BeanDefiningAnnotation; import io.quarkus.arc.processor.BeanDeployment; import io.quarkus.arc.processor.DotNames; -import io.quarkus.arc.runtime.LifecycleEventRunner; import io.quarkus.bootstrap.model.AppArtifactKey; import io.quarkus.deployment.ApplicationArchive; import io.quarkus.deployment.annotations.BuildProducer; @@ -32,41 +29,32 @@ import io.quarkus.deployment.builditem.LiveReloadBuildItem; import io.quarkus.deployment.index.IndexDependencyConfig; import io.quarkus.deployment.index.IndexingUtil; +import io.quarkus.deployment.index.PersistentClassIndex; public class BeanArchiveProcessor { - @Inject - ApplicationArchivesBuildItem applicationArchivesBuildItem; - - @Inject - List additionalBeanDefiningAnnotations; - - @Inject - List additionalBeans; - - @Inject - List generatedBeans; - - @Inject - BuildProducer generatedClass; - - ArcConfig config; - @BuildStep - public BeanArchiveIndexBuildItem build(LiveReloadBuildItem liveReloadBuildItem) throws Exception { + public BeanArchiveIndexBuildItem build(ArcConfig config, ApplicationArchivesBuildItem applicationArchivesBuildItem, + List additionalBeanDefiningAnnotations, + List additionalBeans, List generatedBeans, + LiveReloadBuildItem liveReloadBuildItem, BuildProducer generatedClass, + CustomScopeAnnotationsBuildItem customScopes) + throws Exception { // First build an index from application archives - IndexView applicationIndex = buildApplicationIndex(); + IndexView applicationIndex = buildApplicationIndex(config, applicationArchivesBuildItem, + additionalBeanDefiningAnnotations, customScopes); // Then build additional index for beans added by extensions Indexer additionalBeanIndexer = new Indexer(); - List additionalBeans = new ArrayList<>(); - for (AdditionalBeanBuildItem i : this.additionalBeans) { - additionalBeans.addAll(i.getBeanClasses()); + List additionalBeanClasses = new ArrayList<>(); + for (AdditionalBeanBuildItem i : additionalBeans) { + additionalBeanClasses.addAll(i.getBeanClasses()); } - additionalBeans.add(LifecycleEventRunner.class.getName()); + + // Build the index for additional beans and generated bean classes Set additionalIndex = new HashSet<>(); - for (String beanClass : additionalBeans) { + for (String beanClass : additionalBeanClasses) { IndexingUtil.indexClass(beanClass, additionalBeanIndexer, applicationIndex, additionalIndex, Thread.currentThread().getContextClassLoader()); } @@ -80,21 +68,23 @@ public BeanArchiveIndexBuildItem build(LiveReloadBuildItem liveReloadBuildItem) generatedBeanClass.getSource())); } - BeanArchives.PersistentClassIndex index = liveReloadBuildItem.getContextObject(BeanArchives.PersistentClassIndex.class); + PersistentClassIndex index = liveReloadBuildItem.getContextObject(PersistentClassIndex.class); if (index == null) { - index = new BeanArchives.PersistentClassIndex(); - liveReloadBuildItem.setContextObject(BeanArchives.PersistentClassIndex.class, index); + index = new PersistentClassIndex(); + liveReloadBuildItem.setContextObject(PersistentClassIndex.class, index); } // Finally, index ArC/CDI API built-in classes return new BeanArchiveIndexBuildItem( - BeanArchives.buildBeanArchiveIndex(Thread.currentThread().getContextClassLoader(), index, applicationIndex, + BeanArchives.buildBeanArchiveIndex(Thread.currentThread().getContextClassLoader(), index.getAdditionalClasses(), + 
applicationIndex, additionalBeanIndexer.complete()), - generatedClassNames, - additionalBeans); + generatedClassNames); } - private IndexView buildApplicationIndex() { + private IndexView buildApplicationIndex(ArcConfig config, ApplicationArchivesBuildItem applicationArchivesBuildItem, + List additionalBeanDefiningAnnotations, + CustomScopeAnnotationsBuildItem customScopes) { Set archives = applicationArchivesBuildItem.getAllApplicationArchives(); @@ -111,10 +101,13 @@ private IndexView buildApplicationIndex() { } } - Collection beanDefiningAnnotations = BeanDeployment + Set beanDefiningAnnotations = BeanDeployment .initBeanDefiningAnnotations(additionalBeanDefiningAnnotations.stream() .map(bda -> new BeanDefiningAnnotation(bda.getName(), bda.getDefaultScope())) .collect(Collectors.toList()), stereotypes); + for (DotName customScopeAnnotationName : customScopes.getCustomScopeNames()) { + beanDefiningAnnotations.add(customScopeAnnotationName); + } // Also include archives that are not bean archives but contain qualifiers or interceptor bindings beanDefiningAnnotations.add(DotNames.QUALIFIER); beanDefiningAnnotations.add(DotNames.INTERCEPTOR_BINDING); @@ -122,7 +115,7 @@ private IndexView buildApplicationIndex() { List indexes = new ArrayList<>(); for (ApplicationArchive archive : applicationArchivesBuildItem.getApplicationArchives()) { - if (isApplicationArchiveExcluded(archive)) { + if (isApplicationArchiveExcluded(config, archive)) { continue; } IndexView index = archive.getIndex(); @@ -137,14 +130,17 @@ && containsBeanDefiningAnnotation(index, beanDefiningAnnotations))) { return CompositeIndex.create(indexes); } - private boolean isApplicationArchiveExcluded(ApplicationArchive archive) { + private boolean isApplicationArchiveExcluded(ArcConfig config, ApplicationArchive archive) { if (archive.getArtifactKey() != null) { AppArtifactKey key = archive.getArtifactKey(); for (IndexDependencyConfig excludeDependency : config.excludeDependency.values()) { if (Objects.equal(key.getArtifactId(), excludeDependency.artifactId) - && Objects.equal(key.getGroupId(), excludeDependency.groupId) - && Objects.equal(key.getClassifier(), excludeDependency.classifier)) { - return true; + && Objects.equal(key.getGroupId(), excludeDependency.groupId)) { + if (excludeDependency.classifier.isPresent()) { + return Objects.equal(key.getClassifier(), excludeDependency.classifier.get()); + } else { + return true; + } } } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanDeploymentValidatorBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanDeploymentValidatorBuildItem.java deleted file mode 100644 index ac674cd404536..0000000000000 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanDeploymentValidatorBuildItem.java +++ /dev/null @@ -1,25 +0,0 @@ -package io.quarkus.arc.deployment; - -import io.quarkus.arc.processor.BeanDeploymentValidator; -import io.quarkus.builder.item.MultiBuildItem; - -/** - * Register a custom {@link BeanDeploymentValidator} which can either perform additional validation or enforce - * validation skip for certain components. 
- * - * @see NoArgsConstructorProcessor#addMissingConstructors() - * @see ObserverValidationProcessor - */ -public final class BeanDeploymentValidatorBuildItem extends MultiBuildItem { - - private final BeanDeploymentValidator beanDeploymentValidator; - - public BeanDeploymentValidatorBuildItem(BeanDeploymentValidator beanDeploymentValidator) { - this.beanDeploymentValidator = beanDeploymentValidator; - } - - public BeanDeploymentValidator getBeanDeploymentValidator() { - return beanDeploymentValidator; - } - -} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanDiscoveryFinishedBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanDiscoveryFinishedBuildItem.java new file mode 100644 index 0000000000000..1072487dae251 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanDiscoveryFinishedBuildItem.java @@ -0,0 +1,21 @@ +package io.quarkus.arc.deployment; + +import io.quarkus.arc.processor.BeanDeployment; + +/** + * Consumers of this build item can easily inspect all class-based beans, observers and injection points registered in the + * application. Synthetic beans and observers are not included. If you need to consider synthetic components as well use + * the {@link SynthesisFinishedBuildItem} instead. + *
<p>
+ * Additionaly, the bean resolver can be used to apply the type-safe resolution rules, e.g. to find out wheter there is a bean + * that would satisfy certain combination of required type and qualifiers. + * + * @see SynthesisFinishedBuildItem + */ +public final class BeanDiscoveryFinishedBuildItem extends RegisteredComponentsBuildItem { + + public BeanDiscoveryFinishedBuildItem(BeanDeployment beanDeployment) { + super(beanDeployment); + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrarBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrarBuildItem.java deleted file mode 100644 index d318e50ef1b24..0000000000000 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrarBuildItem.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.quarkus.arc.deployment; - -import io.quarkus.arc.processor.BeanRegistrar; -import io.quarkus.builder.item.MultiBuildItem; - -/** - * Used for registration of a synthetic bean; grants access to {@link BeanRegistrar} which is an API allowing to - * specify a synthetic bean through series of configuration methods (scope, type, qualifiers, ...). - * - * This is a build time alternative to CDI BeanConfigurator API. - */ -public final class BeanRegistrarBuildItem extends MultiBuildItem { - - private final BeanRegistrar beanRegistrar; - - public BeanRegistrarBuildItem(BeanRegistrar beanRegistrar) { - this.beanRegistrar = beanRegistrar; - } - - public BeanRegistrar getBeanRegistrar() { - return beanRegistrar; - } -} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrationPhaseBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrationPhaseBuildItem.java index ba956c71f37b7..aa7d79a46923a 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrationPhaseBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanRegistrationPhaseBuildItem.java @@ -1,12 +1,15 @@ package io.quarkus.arc.deployment; import java.util.Arrays; +import java.util.Collection; import java.util.List; import io.quarkus.arc.processor.BeanConfigurator; import io.quarkus.arc.processor.BeanProcessor; import io.quarkus.arc.processor.BeanRegistrar; import io.quarkus.arc.processor.BeanRegistrar.RegistrationContext; +import io.quarkus.arc.processor.BuildExtension; +import io.quarkus.arc.processor.InjectionPointInfo; import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.deployment.annotations.BuildProducer; @@ -36,6 +39,10 @@ public RegistrationContext getContext() { return context; } + public Collection getInjectionPoints() { + return getContext().get(BuildExtension.Key.INJECTION_POINTS); + } + public BeanProcessor getBeanProcessor() { return beanProcessor; } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeConditionBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeConditionBuildItem.java index e912c5e56e4a7..1554e7b54eb8c 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeConditionBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeConditionBuildItem.java @@ -10,12 +10,15 @@ public final class BuildTimeConditionBuildItem extends MultiBuildItem { private final boolean enabled; public BuildTimeConditionBuildItem(AnnotationTarget target, 
boolean enabled) { - AnnotationTarget.Kind kind = target.kind(); - if ((kind != AnnotationTarget.Kind.CLASS) && (kind != AnnotationTarget.Kind.METHOD) - && (kind != AnnotationTarget.Kind.FIELD)) { - throw new IllegalArgumentException("'target' can only be a class, a field or a method"); + switch (target.kind()) { + case CLASS: + case METHOD: + case FIELD: + this.target = target; + break; + default: + throw new IllegalArgumentException("'target' can only be a class, a field or a method"); } - this.target = target; this.enabled = enabled; } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeEnabledProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeEnabledProcessor.java index d93b1788df7de..7a1f8d12ccec5 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeEnabledProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BuildTimeEnabledProcessor.java @@ -5,6 +5,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.spi.ConfigProviderResolver; @@ -14,15 +16,18 @@ import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.DotName; import org.jboss.jandex.FieldInfo; +import org.jboss.jandex.IndexView; import org.jboss.jandex.MethodInfo; import org.jboss.logging.Logger; import io.quarkus.arc.processor.AnnotationsTransformer; +import io.quarkus.arc.processor.AnnotationsTransformer.TransformationContext; import io.quarkus.arc.processor.DotNames; import io.quarkus.arc.processor.Transformation; import io.quarkus.arc.profile.IfBuildProfile; import io.quarkus.arc.profile.UnlessBuildProfile; import io.quarkus.arc.properties.IfBuildProperty; +import io.quarkus.arc.properties.UnlessBuildProperty; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; @@ -36,41 +41,108 @@ public class BuildTimeEnabledProcessor { private static final DotName UNLESS_BUILD_PROFILE = DotName.createSimple(UnlessBuildProfile.class.getName()); private static final DotName IF_BUILD_PROPERTY = DotName.createSimple(IfBuildProperty.class.getName()); + private static final DotName UNLESS_BUILD_PROPERTY = DotName.createSimple(UnlessBuildProperty.class.getName()); @BuildStep - void ifBuildProfile(CombinedIndexBuildItem index, BuildProducer producer) { + void ifBuildProfile(CombinedIndexBuildItem index, BuildProducer producer, + BuildProducer producerPreAdditionalBean) { Collection annotationInstances = index.getIndex().getAnnotations(IF_BUILD_PROFILE); for (AnnotationInstance instance : annotationInstances) { String profileOnInstance = instance.value().asString(); boolean enabled = profileOnInstance.equals(ProfileManager.getActiveProfile()); if (enabled) { - LOGGER.debug("Enabling " + instance + " since the profile value matches the active profile."); + LOGGER.debug("Enabling " + instance.target() + " since the profile value matches the active profile."); } else { - LOGGER.debug("Disabling " + instance + " since the profile value does not match the active profile."); + LOGGER.debug("Disabling " + instance.target() + " since the profile value does not match the active profile."); } producer.produce(new BuildTimeConditionBuildItem(instance.target(), enabled)); + 
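// Illustrative sketch of a bean class guarded by the new @UnlessBuildProperty annotation processed
// in this class: the bean is kept only if the build-time value of the property differs from the
// given stringValue. The property name and the bean class below are made-up placeholders.
import javax.enterprise.context.Dependent;

import io.quarkus.arc.properties.UnlessBuildProperty;

@Dependent
@UnlessBuildProperty(name = "greeting.legacy", stringValue = "true")
public class ModernGreetingBean {

    public String greet() {
        return "hello";
    }
}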
producerPreAdditionalBean.produce(new PreAdditionalBeanBuildTimeConditionBuildItem(instance.target(), enabled)); } } @BuildStep - void unlessBuildProfile(CombinedIndexBuildItem index, BuildProducer producer) { + void unlessBuildProfile(CombinedIndexBuildItem index, BuildProducer producer, + BuildProducer producerPreAdditionalBean) { Collection annotationInstances = index.getIndex().getAnnotations(UNLESS_BUILD_PROFILE); for (AnnotationInstance instance : annotationInstances) { String profileOnInstance = instance.value().asString(); boolean enabled = !profileOnInstance.equals(ProfileManager.getActiveProfile()); if (enabled) { - LOGGER.debug("Enabling " + instance + " since the profile value does not match the active profile."); + LOGGER.debug("Enabling " + instance.target() + " since the profile value does not match the active profile."); } else { - LOGGER.debug("Disabling " + instance + " since the profile value matches the active profile."); + LOGGER.debug("Disabling " + instance.target() + " since the profile value matches the active profile."); } producer.produce(new BuildTimeConditionBuildItem(instance.target(), enabled)); + producerPreAdditionalBean.produce(new PreAdditionalBeanBuildTimeConditionBuildItem(instance.target(), enabled)); } } @BuildStep void ifBuildProperty(BeanArchiveIndexBuildItem index, BuildProducer producer) { + buildProperty(IF_BUILD_PROPERTY, new BiFunction() { + @Override + public Boolean apply(String stringValue, String expectedStringValue) { + return stringValue.equals(expectedStringValue); + } + }, index.getIndex(), new BiConsumer() { + @Override + public void accept(AnnotationTarget target, Boolean enabled) { + producer.produce(new BuildTimeConditionBuildItem(target, enabled)); + } + }); + } + + @BuildStep + void unlessBuildProperty(BeanArchiveIndexBuildItem index, BuildProducer producer) { + buildProperty(UNLESS_BUILD_PROPERTY, new BiFunction() { + @Override + public Boolean apply(String stringValue, String expectedStringValue) { + return !stringValue.equals(expectedStringValue); + } + }, index.getIndex(), new BiConsumer() { + @Override + public void accept(AnnotationTarget target, Boolean enabled) { + producer.produce(new BuildTimeConditionBuildItem(target, enabled)); + } + }); + } + + @BuildStep + void ifBuildPropertyPreAdditionalBean(CombinedIndexBuildItem index, + BuildProducer producer) { + buildProperty(IF_BUILD_PROPERTY, new BiFunction() { + @Override + public Boolean apply(String stringValue, String expectedStringValue) { + return stringValue.equals(expectedStringValue); + } + }, index.getIndex(), new BiConsumer() { + @Override + public void accept(AnnotationTarget target, Boolean enabled) { + producer.produce(new PreAdditionalBeanBuildTimeConditionBuildItem(target, enabled)); + } + }); + } + + @BuildStep + void unlessBuildPropertyPreAdditionalBean(CombinedIndexBuildItem index, + BuildProducer producer) { + buildProperty(UNLESS_BUILD_PROPERTY, new BiFunction() { + @Override + public Boolean apply(String stringValue, String expectedStringValue) { + return !stringValue.equals(expectedStringValue); + } + }, index.getIndex(), new BiConsumer() { + @Override + public void accept(AnnotationTarget target, Boolean enabled) { + producer.produce(new PreAdditionalBeanBuildTimeConditionBuildItem(target, enabled)); + } + }); + } + + void buildProperty(DotName annotationName, BiFunction testFun, IndexView index, + BiConsumer producer) { Config config = ConfigProviderResolver.instance().getConfig(); - Collection annotationInstances = 
index.getIndex().getAnnotations(IF_BUILD_PROPERTY); + Collection annotationInstances = index.getAnnotations(annotationName); for (AnnotationInstance instance : annotationInstances) { String propertyName = instance.value("name").asString(); String expectedStringValue = instance.value("stringValue").asString(); @@ -80,27 +152,26 @@ void ifBuildProperty(BeanArchiveIndexBuildItem index, BuildProducer optionalValue = config.getOptionalValue(propertyName, String.class); boolean enabled; if (optionalValue.isPresent()) { - if (optionalValue.get().equals(expectedStringValue)) { - LOGGER.debug("Enabling " + instance + " since the property value matches the expected one."); + if (testFun.apply(optionalValue.get(), expectedStringValue)) { + LOGGER.debug("Enabling " + instance.target() + " since the property value matches the expected one."); enabled = true; } else { - LOGGER.debug("Disabling " + instance - + " since the property value does not match the expected one."); + LOGGER.debug("Disabling " + instance.target() + + " since the property value matches the specified value one."); enabled = false; } } else { if (enableIfMissing) { - LOGGER.debug("Enabling " + instance + LOGGER.debug("Enabling " + instance.target() + " since the property has not been set and 'enableIfMissing' is set to 'true'."); enabled = true; } else { - LOGGER.debug("Disabling " + instance + LOGGER.debug("Disabling " + instance.target() + " since the property has not been set and 'enableIfMissing' is set to 'false'."); enabled = false; } } - - producer.produce(new BuildTimeConditionBuildItem(instance.target(), enabled)); + producer.accept(instance.target(), enabled); } } @@ -146,18 +217,18 @@ public void transform(TransformationContext ctx) { if (ctx.isClass()) { DotName classDotName = target.asClass().name(); if (classTargets.containsKey(classDotName)) { - transformBean(target, ctx.transform(), classTargets.get(classDotName)); + transformBean(target, ctx, classTargets.get(classDotName)); } } else if (ctx.isMethod()) { MethodInfo method = target.asMethod(); if (methodTargets.containsKey(method)) { - transformBean(target, ctx.transform(), methodTargets.get(method)); + transformBean(target, ctx, methodTargets.get(method)); } } else if (ctx.isField()) { FieldInfo field = target.asField(); String uniqueFieldName = toUniqueString(field); if (fieldTargets.containsKey(uniqueFieldName)) { - transformBean(target, ctx.transform(), fieldTargets.get(uniqueFieldName)); + transformBean(target, ctx, fieldTargets.get(uniqueFieldName)); } } } @@ -168,32 +239,18 @@ private String toUniqueString(FieldInfo field) { return field.declaringClass().name().toString() + "." 
+ field.name(); } - private void transformBean(AnnotationTarget target, Transformation transform, boolean enable) { - if (enable) { - enableBean(transform); - } else { - disableBean(target, transform); + private void transformBean(AnnotationTarget target, TransformationContext ctx, boolean enabled) { + if (!enabled) { + Transformation transform = ctx.transform(); + if (target.kind() == Kind.CLASS) { + // Veto the class + transform.add(DotNames.VETOED); + } else { + // Add @Alternative to the producer + transform.add(DotNames.ALTERNATIVE); + } + transform.done(); } - transform.done(); - } - - private void enableBean(Transformation transform) { - transform.add(DotNames.ALTERNATIVE_PRIORITY, - createAlternativePriority()); } - private AnnotationValue createAlternativePriority() { - // use Integer.MAX_VALUE - 1 to avoid having conflicts with enabling beans via config - return AnnotationValue.createIntegerValue("value", Integer.MAX_VALUE - 1); - } - - private void disableBean(AnnotationTarget target, Transformation transform) { - if (target.kind() == Kind.CLASS) { - // Veto the class - transform.add(DotNames.VETOED); - } else { - // Add @Alternative to the producer - transform.add(DotNames.ALTERNATIVE); - } - } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CommandLineArgumentsProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CommandLineArgumentsProcessor.java index 25369b165810a..51ff75c510d90 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CommandLineArgumentsProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CommandLineArgumentsProcessor.java @@ -1,26 +1,27 @@ package io.quarkus.arc.deployment; -import io.quarkus.arc.runtime.ArcRecorder; -import io.quarkus.arc.runtime.CommandLineArgumentsProducer; +import org.jboss.jandex.DotName; +import org.jboss.jandex.Type; +import org.jboss.jandex.Type.Kind; + +import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; -import io.quarkus.deployment.annotations.ExecutionTime; -import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.RawCommandLineArgumentsBuildItem; import io.quarkus.runtime.annotations.CommandLineArguments; public class CommandLineArgumentsProcessor { @BuildStep - @Record(ExecutionTime.STATIC_INIT) - BeanContainerListenerBuildItem commandLineArgs(RawCommandLineArgumentsBuildItem rawCommandLineArgumentsBuildItem, - ArcRecorder arcRecorder) { - //todo: this should be filtered - return new BeanContainerListenerBuildItem(arcRecorder.initCommandLineArgs(rawCommandLineArgumentsBuildItem)); - } + SyntheticBeanBuildItem commandLineArgs(RawCommandLineArgumentsBuildItem rawCommandLineArgumentsBuildItem, + BuildProducer additionalBeans) { + additionalBeans.produce(new AdditionalBeanBuildItem(CommandLineArguments.class)); - @BuildStep - AdditionalBeanBuildItem qualifier() { - return AdditionalBeanBuildItem.builder().setUnremovable() - .addBeanClasses(CommandLineArguments.class, CommandLineArgumentsProducer.class).build(); + Type stringArray = Type.create(DotName.createSimple(String[].class.getName()), Kind.ARRAY); + // implClazz is ignored because a provider type is set + return SyntheticBeanBuildItem.configure(Object.class).providerType(stringArray).addType(stringArray) + .addQualifier(CommandLineArguments.class) + .setRuntimeInit().supplier(rawCommandLineArgumentsBuildItem) + .unremovable().done(); } + } diff --git 
a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CompletedApplicationClassPredicateBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CompletedApplicationClassPredicateBuildItem.java new file mode 100644 index 0000000000000..fd02286dbee49 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CompletedApplicationClassPredicateBuildItem.java @@ -0,0 +1,29 @@ +package io.quarkus.arc.deployment; + +import java.util.function.Predicate; + +import org.jboss.jandex.DotName; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * This build item hold the "final" predicate that is used to distinguish application classes from framework/library classes. + */ +public final class CompletedApplicationClassPredicateBuildItem extends SimpleBuildItem implements Predicate { + + private final Predicate applicationClassPredicate; + + CompletedApplicationClassPredicateBuildItem(Predicate applicationClassPredicate) { + this.applicationClassPredicate = applicationClassPredicate; + } + + public Predicate getApplicationClassPredicate() { + return applicationClassPredicate; + } + + @Override + public boolean test(DotName name) { + return applicationClassPredicate.test(name); + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigBuildStep.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigBuildStep.java index 6d90e69e4ba84..dc5d10e7a70d2 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigBuildStep.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigBuildStep.java @@ -1,21 +1,26 @@ package io.quarkus.arc.deployment; import static io.quarkus.deployment.annotations.ExecutionTime.RUNTIME_INIT; +import static io.smallrye.config.ConfigMappings.ConfigMappingWithPrefix.configMappingWithPrefix; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.mapping; +import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toSet; -import java.util.Collections; +import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; +import java.util.stream.Stream; import javax.enterprise.context.Dependent; import javax.enterprise.inject.CreationException; import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.ClassInfo; import org.jboss.jandex.DotName; @@ -25,20 +30,26 @@ import org.jboss.jandex.Type.Kind; import io.quarkus.arc.deployment.BeanRegistrationPhaseBuildItem.BeanConfiguratorBuildItem; -import io.quarkus.arc.processor.BeanRegistrar; -import io.quarkus.arc.processor.BuildExtension; import io.quarkus.arc.processor.DotNames; import io.quarkus.arc.processor.InjectionPointInfo; import io.quarkus.arc.runtime.ConfigBeanCreator; +import io.quarkus.arc.runtime.ConfigMappingCreator; import io.quarkus.arc.runtime.ConfigRecorder; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.Record; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.ConfigurationBuildItem; +import io.quarkus.deployment.builditem.GeneratedClassBuildItem; import 
io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.configuration.definition.RootDefinition; +import io.quarkus.deployment.recording.RecorderContext; import io.quarkus.gizmo.ResultHandle; import io.quarkus.runtime.annotations.ConfigPhase; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.ConfigMappingLoader; +import io.smallrye.config.ConfigMappingMetadata; +import io.smallrye.config.ConfigMappings.ConfigMappingWithPrefix; import io.smallrye.config.inject.ConfigProducer; /** @@ -47,23 +58,26 @@ public class ConfigBuildStep { private static final DotName CONFIG_PROPERTY_NAME = DotName.createSimple(ConfigProperty.class.getName()); + private static final DotName CONFIG_MAPPING_NAME = DotName.createSimple(ConfigMapping.class.getName()); private static final DotName SET_NAME = DotName.createSimple(Set.class.getName()); private static final DotName LIST_NAME = DotName.createSimple(List.class.getName()); + private static final DotName CONFIG_VALUE_NAME = DotName.createSimple(io.smallrye.config.ConfigValue.class.getName()); + @BuildStep AdditionalBeanBuildItem bean() { return new AdditionalBeanBuildItem(ConfigProducer.class); } @BuildStep - void analyzeConfigPropertyInjectionPoints(BeanRegistrationPhaseBuildItem beanRegistrationPhase, + void analyzeConfigPropertyInjectionPoints(BeanDiscoveryFinishedBuildItem beanDiscovery, BuildProducer configProperties, BuildProducer reflectiveClass, - BuildProducer beanConfigurators) { + BuildProducer syntheticBeans) { Set customBeanTypes = new HashSet<>(); - for (InjectionPointInfo injectionPoint : beanRegistrationPhase.getContext().get(BuildExtension.Key.INJECTION_POINTS)) { + for (InjectionPointInfo injectionPoint : beanDiscovery.getInjectionPoints()) { if (injectionPoint.hasDefaultedQualifier()) { // Defaulted qualifier means no @ConfigProperty continue; @@ -96,7 +110,10 @@ void analyzeConfigPropertyInjectionPoints(BeanRegistrationPhaseBuildItem beanReg customBeanTypes.add(requiredType); } - if (DotNames.OPTIONAL.equals(requiredType.name())) { + if (DotNames.OPTIONAL.equals(requiredType.name()) + || DotNames.OPTIONAL_INT.equals(requiredType.name()) + || DotNames.OPTIONAL_LONG.equals(requiredType.name()) + || DotNames.OPTIONAL_DOUBLE.equals(requiredType.name())) { // Never validate Optional values continue; } @@ -114,13 +131,14 @@ void analyzeConfigPropertyInjectionPoints(BeanRegistrationPhaseBuildItem beanReg // Implicit converters are most likely used reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, type.name().toString())); } - beanConfigurators.produce(new BeanConfiguratorBuildItem(beanRegistrationPhase.getContext().configure( - type.kind() == Kind.ARRAY ? DotName.createSimple(ConfigBeanCreator.class.getName()) : type.name()) + DotName implClazz = type.kind() == Kind.ARRAY ? DotName.createSimple(ConfigBeanCreator.class.getName()) + : type.name(); + syntheticBeans.produce(SyntheticBeanBuildItem.configure(implClazz) .creator(ConfigBeanCreator.class) .providerType(type) .types(type) - .qualifiers(AnnotationInstance.create(CONFIG_PROPERTY_NAME, null, Collections.emptyList())) - .param("requiredType", type.name().toString()))); + .addQualifier(CONFIG_PROPERTY_NAME) + .param("requiredType", type.name().toString()).done()); } } @@ -146,29 +164,110 @@ void validateConfigProperties(ConfigRecorder recorder, List configRootClass = rootDefinition.getConfigurationClass(); - context.configure(configRootClass).types(configRootClass) - .scope(Dependent.class).creator(mc -> { - // e.g. 
return Config.ApplicationConfig - ResultHandle configRoot = mc.readStaticField(rootDefinition.getDescriptor()); - // BUILD_AND_RUN_TIME_FIXED roots are always set before the container is started (in the static initializer of the generated Config class) - // However, RUN_TIME roots may be not be set when the bean instance is created - mc.ifNull(configRoot).trueBranch().throwException(CreationException.class, - String.format("Config root [%s] with config phase [%s] not initialized yet.", - configRootClass.getName(), rootDefinition.getConfigPhase().name())); - mc.returnValue(configRoot); - }).done(); + void registerConfigRootsAsBeans(ConfigurationBuildItem configItem, BuildProducer syntheticBeans) { + for (RootDefinition rootDefinition : configItem.getReadResult().getAllRoots()) { + if (rootDefinition.getConfigPhase() == ConfigPhase.BUILD_AND_RUN_TIME_FIXED + || rootDefinition.getConfigPhase() == ConfigPhase.RUN_TIME) { + Class configRootClass = rootDefinition.getConfigurationClass(); + syntheticBeans.produce(SyntheticBeanBuildItem.configure(configRootClass).types(configRootClass) + .scope(Dependent.class).creator(mc -> { + // e.g. return Config.ApplicationConfig + ResultHandle configRoot = mc.readStaticField(rootDefinition.getDescriptor()); + // BUILD_AND_RUN_TIME_FIXED roots are always set before the container is started (in the static initializer of the generated Config class) + // However, RUN_TIME roots may be not be set when the bean instance is created + mc.ifNull(configRoot).trueBranch().throwException(CreationException.class, + String.format("Config root [%s] with config phase [%s] not initialized yet.", + configRootClass.getName(), rootDefinition.getConfigPhase().name())); + mc.returnValue(configRoot); + }).done()); + } + } + } + + @BuildStep + void generateConfigMappings( + CombinedIndexBuildItem combinedIndex, + BeanRegistrationPhaseBuildItem beanRegistrationPhase, + BuildProducer generatedClasses, + BuildProducer reflectiveClasses, + BuildProducer configMappings, + BuildProducer beanConfigurators) { + + for (AnnotationInstance instance : combinedIndex.getIndex().getAnnotations(CONFIG_MAPPING_NAME)) { + AnnotationTarget target = instance.target(); + if (!target.kind().equals(AnnotationTarget.Kind.CLASS)) { + continue; + } + + Class type = toClass(target.asClass()); + String prefix = Optional.ofNullable(instance.value("prefix")).map(AnnotationValue::asString).orElse(""); + + List configMappingsMetadata = ConfigMappingLoader.getConfigMappingsMetadata(type); + List mappingsInfo = new ArrayList<>(); + configMappingsMetadata.forEach(mappingMetadata -> { + generatedClasses.produce( + new GeneratedClassBuildItem(true, mappingMetadata.getClassName(), mappingMetadata.getClassBytes())); + reflectiveClasses + .produce(ReflectiveClassBuildItem.builder(mappingMetadata.getInterfaceType()).methods(true).build()); + reflectiveClasses + .produce(ReflectiveClassBuildItem.builder(mappingMetadata.getClassName()).constructors(true).build()); + + ClassInfo mappingInfo = combinedIndex.getIndex() + .getClassByName(DotName.createSimple(mappingMetadata.getInterfaceType().getName())); + if (mappingInfo != null) { + mappingsInfo.add(mappingInfo); + } + }); + + // Search and register possible classes for implicit Converter methods + for (ClassInfo classInfo : mappingsInfo) { + for (MethodInfo method : classInfo.methods()) { + if (!isHandledByProducers(method.returnType()) && + mappingsInfo.stream() + .map(ClassInfo::name) + .noneMatch(name -> name.equals(method.returnType().name()))) { + reflectiveClasses + 
.produce(new ReflectiveClassBuildItem(true, false, method.returnType().name().toString())); } } } - }); + + configMappings.produce(new ConfigMappingBuildItem(type, prefix)); + + beanConfigurators.produce(new BeanConfiguratorBuildItem( + beanRegistrationPhase.getContext() + .configure(type) + .types(type) + .creator(ConfigMappingCreator.class) + .param("type", type))); + } + } + + @BuildStep + @Record(RUNTIME_INIT) + void registerConfigMappings( + RecorderContext context, + ConfigRecorder recorder, + List configMappings) throws Exception { + + context.registerNonDefaultConstructor( + ConfigMappingWithPrefix.class.getDeclaredConstructor(Class.class, String.class), + configMappingWithPrefix -> Stream.of(configMappingWithPrefix.getKlass(), configMappingWithPrefix.getPrefix()) + .collect(toList())); + + recorder.registerConfigMappings(configMappings.stream() + .map(configMapping -> configMappingWithPrefix(configMapping.getInterfaceType(), configMapping.getPrefix())) + .collect(toSet())); + } + + private static Class toClass(ClassInfo classInfo) { + String className = classInfo.name().toString(); + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + try { + return classLoader.loadClass(className); + } catch (ClassNotFoundException e) { + throw new IllegalStateException("The class (" + className + ") cannot be created during deployment.", e); + } } private String getPropertyName(String name, ClassInfo declaringClass) { @@ -181,7 +280,7 @@ private String getPropertyName(String name, ClassInfo declaringClass) { return builder.append(".").append(name).toString(); } - private boolean isHandledByProducers(Type type) { + public static boolean isHandledByProducers(Type type) { if (type.kind() == Kind.ARRAY) { return false; } @@ -190,6 +289,9 @@ private boolean isHandledByProducers(Type type) { } return DotNames.STRING.equals(type.name()) || DotNames.OPTIONAL.equals(type.name()) || + DotNames.OPTIONAL_INT.equals(type.name()) || + DotNames.OPTIONAL_LONG.equals(type.name()) || + DotNames.OPTIONAL_DOUBLE.equals(type.name()) || SET_NAME.equals(type.name()) || LIST_NAME.equals(type.name()) || DotNames.LONG.equals(type.name()) || @@ -199,7 +301,8 @@ private boolean isHandledByProducers(Type type) { DotNames.DOUBLE.equals(type.name()) || DotNames.SHORT.equals(type.name()) || DotNames.BYTE.equals(type.name()) || - DotNames.CHARACTER.equals(type.name()); + DotNames.CHARACTER.equals(type.name()) || + CONFIG_VALUE_NAME.equals(type.name()); } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigMappingBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigMappingBuildItem.java new file mode 100644 index 0000000000000..b97fa357e77e9 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ConfigMappingBuildItem.java @@ -0,0 +1,21 @@ +package io.quarkus.arc.deployment; + +import io.quarkus.builder.item.MultiBuildItem; + +public final class ConfigMappingBuildItem extends MultiBuildItem { + private final Class interfaceType; + private final String prefix; + + public ConfigMappingBuildItem(final Class interfaceType, final String prefix) { + this.interfaceType = interfaceType; + this.prefix = prefix; + } + + public Class getInterfaceType() { + return interfaceType; + } + + public String getPrefix() { + return prefix; + } +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrarBuildItem.java 
b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrarBuildItem.java deleted file mode 100644 index 5a24cfe1f85c7..0000000000000 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrarBuildItem.java +++ /dev/null @@ -1,56 +0,0 @@ -package io.quarkus.arc.deployment; - -import java.lang.annotation.Annotation; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; - -import org.jboss.jandex.DotName; -import org.jboss.logging.Logger; - -import io.quarkus.arc.processor.ContextRegistrar; -import io.quarkus.builder.item.MultiBuildItem; - -/** - * Make it possible to register one or more custom CDI contexts. - * If you are registering a new context, you also pass in the respective annotation value into the constructor either in - * {@link DotName} form, or as {@code Class}. - * - * This information is then leveraged in {@link CustomScopeAnnotationsBuildItem} which allows consumers to browse - * all known custom scoped within deployment even early in the build process. - */ -public final class ContextRegistrarBuildItem extends MultiBuildItem { - - private static final Logger LOGGER = Logger.getLogger(ContextRegistrarBuildItem.class); - - private final ContextRegistrar contextRegistrar; - private final Collection annotationNames; - - public ContextRegistrarBuildItem(ContextRegistrar contextRegistrar, DotName... annotationsNames) { - this.contextRegistrar = contextRegistrar; - if (annotationsNames.length == 0) { - // log info level - usually you want to pass in annotation name as well - LOGGER.infof("A ContextRegistrarBuildItem was created but no annotation name/class was specified." + - "This information can be later on consumed by other extensions via CustomScopeAnnotationsBuildItem, " + - "please consider adding it."); - } - Collection names = new ArrayList<>(annotationsNames.length); - for (DotName name : annotationsNames) { - names.add(name); - } - this.annotationNames = names; - } - - public ContextRegistrarBuildItem(ContextRegistrar contextRegistrar, Class... annotationsClasses) { - this(contextRegistrar, Arrays.stream(annotationsClasses).map(Class::getName) - .map(DotName::createSimple).toArray(DotName[]::new)); - } - - public ContextRegistrar getContextRegistrar() { - return contextRegistrar; - } - - public Collection getAnnotationNames() { - return annotationNames; - } -} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrationPhaseBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrationPhaseBuildItem.java index 2e34c898d4f18..e8cb7c1487fe1 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrationPhaseBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ContextRegistrationPhaseBuildItem.java @@ -19,6 +19,7 @@ * {@link ArcProcessor#registerBeans(ContextRegistrationPhaseBuildItem, List)}. 
* * @see ContextConfiguratorBuildItem + * @see CustomScopeBuildItem */ public final class ContextRegistrationPhaseBuildItem extends SimpleBuildItem { diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeAnnotationsBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeAnnotationsBuildItem.java index cc537051f04c8..cbfa7f4b3ffa9 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeAnnotationsBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeAnnotationsBuildItem.java @@ -18,7 +18,7 @@ public final class CustomScopeAnnotationsBuildItem extends SimpleBuildItem { private final Set customScopeNames; - public CustomScopeAnnotationsBuildItem(Set customScopeNames) { + CustomScopeAnnotationsBuildItem(Set customScopeNames) { this.customScopeNames = customScopeNames; } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeBuildItem.java new file mode 100644 index 0000000000000..478b8a3913c65 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/CustomScopeBuildItem.java @@ -0,0 +1,32 @@ +package io.quarkus.arc.deployment; + +import java.lang.annotation.Annotation; + +import org.jboss.jandex.DotName; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * An extension that registers a custom CDI context via {@link ContextRegistrationPhaseBuildItem} should produce this build + * item in order to contribute the custom scope annotation name to the set of bean defining annotations. + * + * @see CustomScopeAnnotationsBuildItem + * @see ContextRegistrationPhaseBuildItem + */ +public final class CustomScopeBuildItem extends MultiBuildItem { + + private final DotName annotationName; + + public CustomScopeBuildItem(Class scope) { + this(DotName.createSimple(scope.getName())); + } + + public CustomScopeBuildItem(DotName annotationName) { + this.annotationName = annotationName; + } + + public DotName getAnnotationName() { + return annotationName; + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/HotDeploymentConfigBuildStep.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/HotDeploymentConfigBuildStep.java index 31976bb8fe051..e3804bf91d460 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/HotDeploymentConfigBuildStep.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/HotDeploymentConfigBuildStep.java @@ -1,13 +1,52 @@ package io.quarkus.arc.deployment; +import java.util.List; +import java.util.function.Predicate; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.DotName; +import org.jboss.jandex.Index; +import org.jboss.jandex.MethodParameterInfo; + +import io.quarkus.arc.processor.DotNames; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.HotDeploymentWatchedFileBuildItem; +import io.quarkus.deployment.dev.DisableInstrumentationForIndexPredicateBuildItem; +import io.quarkus.runtime.StartupEvent; public class HotDeploymentConfigBuildStep { + private static final DotName STARTUP_EVENT_NAME = DotName.createSimple(StartupEvent.class.getName()); + @BuildStep HotDeploymentWatchedFileBuildItem configFile() { return new HotDeploymentWatchedFileBuildItem("META-INF/beans.xml"); } 
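As a side note (not part of this PR's diff), here is a minimal sketch of how an extension build step might produce the new CustomScopeBuildItem shown above; the org.acme names and the MyCustomScoped annotation are purely hypothetical:

```java
package org.acme.deployment;

import org.jboss.jandex.DotName;

import io.quarkus.arc.deployment.CustomScopeBuildItem;
import io.quarkus.deployment.annotations.BuildStep;

public class MyCustomScopeProcessor {

    // Hypothetical scope annotation registered by this extension.
    private static final DotName MY_CUSTOM_SCOPED = DotName.createSimple("org.acme.MyCustomScoped");

    @BuildStep
    CustomScopeBuildItem customScope() {
        // Contributes the annotation to the set of bean defining annotations so that
        // classes annotated with @MyCustomScoped are picked up during bean discovery.
        return new CustomScopeBuildItem(MY_CUSTOM_SCOPED);
    }
}
```

An extension that registers the corresponding context via ContextRegistrationPhaseBuildItem would typically produce this item from the same processor.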
+ @BuildStep + DisableInstrumentationForIndexPredicateBuildItem startup() { + return new DisableInstrumentationForIndexPredicateBuildItem(new Predicate() { + @Override + public boolean test(Index index) { + if (!index.getAnnotations(StartupBuildSteps.STARTUP_NAME).isEmpty()) { + return true; + } + List observesInstances = index.getAnnotations(DotNames.OBSERVES); + if (!observesInstances.isEmpty()) { + for (AnnotationInstance observesInstance : observesInstances) { + if (observesInstance.target().kind() == AnnotationTarget.Kind.METHOD_PARAMETER) { + MethodParameterInfo methodParameterInfo = observesInstance.target().asMethodParameter(); + short paramPos = methodParameterInfo.position(); + if (STARTUP_EVENT_NAME.equals(methodParameterInfo.method().parameters().get(paramPos).name())) { + return true; + } + } + } + } + return false; + } + }); + } + } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java index 2fb195c60941c..7ff5ed89ca6f5 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java @@ -6,20 +6,25 @@ import io.quarkus.arc.runtime.ArcRecorder; import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.Consume; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ApplicationStartBuildItem; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.builditem.RuntimeConfigSetupCompleteBuildItem; import io.quarkus.deployment.builditem.ServiceStartBuildItem; import io.quarkus.deployment.builditem.ShutdownContextBuildItem; public class LifecycleEventsBuildStep { + @Consume(RuntimeConfigSetupCompleteBuildItem.class) @BuildStep @Record(RUNTIME_INIT) ApplicationStartBuildItem startupEvent(ArcRecorder recorder, List startList, BeanContainerBuildItem beanContainer, - ShutdownContextBuildItem shutdown) { - recorder.handleLifecycleEvents(shutdown, beanContainer.getValue()); + ShutdownContextBuildItem shutdown, + LaunchModeBuildItem launchMode, ArcConfig config) { + recorder.handleLifecycleEvents(shutdown, launchMode.getLaunchMode(), config.test.disableApplicationLifecycleObservers); return new ApplicationStartBuildItem(); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverRegistrarBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverRegistrarBuildItem.java deleted file mode 100644 index b072e4ed4c689..0000000000000 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverRegistrarBuildItem.java +++ /dev/null @@ -1,24 +0,0 @@ -package io.quarkus.arc.deployment; - -import io.quarkus.arc.processor.ObserverRegistrar; -import io.quarkus.builder.item.MultiBuildItem; - -/** - * Used for registration of a synthetic observer; grants access to {@link ObserverRegistrar} which is an API allowing to - * specify a synthetic observer through series of configuration methods. - * - * This is a build time alternative to CDI ObserverMethodConfigurator API. 
- */ -public final class ObserverRegistrarBuildItem extends MultiBuildItem { - - private final ObserverRegistrar observerRegistrar; - - public ObserverRegistrarBuildItem(ObserverRegistrar observerRegistrar) { - this.observerRegistrar = observerRegistrar; - } - - public ObserverRegistrar getObserverRegistrar() { - return observerRegistrar; - } - -} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverValidationProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverValidationProcessor.java index c4f6d1fc3813d..79011c3c53674 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverValidationProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverValidationProcessor.java @@ -1,14 +1,13 @@ package io.quarkus.arc.deployment; -import java.util.Collection; - import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.DotName; import org.jboss.jandex.IndexView; import org.jboss.logging.Logger; +import io.quarkus.arc.deployment.ValidationPhaseBuildItem.ValidationErrorBuildItem; import io.quarkus.arc.processor.Annotations; -import io.quarkus.arc.processor.BeanDeploymentValidator; +import io.quarkus.arc.processor.BuildExtension; import io.quarkus.arc.processor.BuiltinScope; import io.quarkus.arc.processor.DotNames; import io.quarkus.arc.processor.ObserverInfo; @@ -26,38 +25,30 @@ public class ObserverValidationProcessor { @BuildStep public void validateApplicationObserver(ApplicationArchivesBuildItem applicationArchivesBuildItem, - BuildProducer validators) { + ValidationPhaseBuildItem validationPhase, BuildProducer errors) { // an index of all root archive classes (usually src/main/classes) IndexView applicationClassesIndex = applicationArchivesBuildItem.getRootArchive().getIndex(); - - validators.produce(new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() { - - @Override - public void validate(ValidationContext context) { - Collection allObservers = context.get(Key.OBSERVERS); - // do the validation for each observer that can be found within application classes - for (ObserverInfo observer : allObservers) { - if (observer.isSynthetic()) { - // Skip synthetic observers - continue; - } - DotName declaringBeanDotName = observer.getDeclaringBean().getBeanClass(); - AnnotationInstance instance = Annotations.getParameterAnnotation(observer.getObserverMethod(), - DotNames.INITIALIZED); - if (applicationClassesIndex.getClassByName(declaringBeanDotName) != null && instance != null && - instance.value().asClass().name().equals(BuiltinScope.APPLICATION.getName())) { - // found an observer for @Initialized(ApplicationScoped.class) - // log a warning and recommend to use StartupEvent instead - final String observerWarning = "The method %s#%s is an observer for " + - "@Initialized(ApplicationScoped.class). Observer notification for this event may " + - "vary between JVM and native modes! 
We strongly recommend to observe StartupEvent " + - "instead as that one is consistently delivered in both modes once the container is " + - "running."; - LOGGER.warnf(observerWarning, observer.getDeclaringBean().getImplClazz(), - observer.getObserverMethod().name()); - } - } + // do the validation for each observer that can be found within application classes + for (ObserverInfo observer : validationPhase.getContext().get(BuildExtension.Key.OBSERVERS)) { + if (observer.isSynthetic()) { + // Skip synthetic observers + continue; + } + DotName declaringBeanDotName = observer.getDeclaringBean().getBeanClass(); + AnnotationInstance instance = Annotations.getParameterAnnotation(observer.getObserverMethod(), + DotNames.INITIALIZED); + if (applicationClassesIndex.getClassByName(declaringBeanDotName) != null && instance != null && + instance.value().asClass().name().equals(BuiltinScope.APPLICATION.getName())) { + // found an observer for @Initialized(ApplicationScoped.class) + // log a warning and recommend to use StartupEvent instead + final String observerWarning = "The method %s#%s is an observer for " + + "@Initialized(ApplicationScoped.class). Observer notification for this event may " + + "vary between JVM and native modes! We strongly recommend to observe StartupEvent " + + "instead as that one is consistently delivered in both modes once the container is " + + "running."; + LOGGER.warnf(observerWarning, observer.getDeclaringBean().getImplClazz(), + observer.getObserverMethod().name()); } - })); + } } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/PreAdditionalBeanBuildTimeConditionBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/PreAdditionalBeanBuildTimeConditionBuildItem.java new file mode 100644 index 0000000000000..1a23fb7cd6221 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/PreAdditionalBeanBuildTimeConditionBuildItem.java @@ -0,0 +1,40 @@ +package io.quarkus.arc.deployment; + +import org.jboss.jandex.AnnotationTarget; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * A type of build item that is similar to {@link BuildTimeConditionBuildItem} but evaluated before + * processing the {@link AdditionalBeanBuildItem} in order to filter the beans thanks to build time conditions + * before actually adding them with a {@link AdditionalBeanBuildItem}. 
+ * + * @see io.quarkus.arc.deployment.BuildTimeConditionBuildItem + * @see io.quarkus.arc.deployment.AdditionalBeanBuildItem + */ +public final class PreAdditionalBeanBuildTimeConditionBuildItem extends MultiBuildItem { + + private final AnnotationTarget target; + private final boolean enabled; + + public PreAdditionalBeanBuildTimeConditionBuildItem(AnnotationTarget target, boolean enabled) { + switch (target.kind()) { + case CLASS: + case METHOD: + case FIELD: + this.target = target; + break; + default: + throw new IllegalArgumentException("'target' can only be a class, a field or a method"); + } + this.enabled = enabled; + } + + public AnnotationTarget getTarget() { + return target; + } + + public boolean isEnabled() { + return enabled; + } +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/QualifierRegistrarBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/QualifierRegistrarBuildItem.java new file mode 100644 index 0000000000000..61ba26d156ec8 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/QualifierRegistrarBuildItem.java @@ -0,0 +1,21 @@ +package io.quarkus.arc.deployment; + +import io.quarkus.arc.processor.QualifierRegistrar; +import io.quarkus.builder.item.MultiBuildItem; + +/** + * Makes it possible to register annotations that should be considered qualifiers but are not annotated with + * {@code javax.inject.Qualifier}. + */ +public final class QualifierRegistrarBuildItem extends MultiBuildItem { + + private final QualifierRegistrar registrar; + + public QualifierRegistrarBuildItem(QualifierRegistrar registrar) { + this.registrar = registrar; + } + + public QualifierRegistrar getQualifierRegistrar() { + return registrar; + } +} \ No newline at end of file diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/RegisteredComponentsBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/RegisteredComponentsBuildItem.java new file mode 100644 index 0000000000000..47ce22ad5f548 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/RegisteredComponentsBuildItem.java @@ -0,0 +1,66 @@ +package io.quarkus.arc.deployment; + +import java.util.Collection; +import java.util.stream.Stream; + +import io.quarkus.arc.processor.BeanDeployment; +import io.quarkus.arc.processor.BeanInfo; +import io.quarkus.arc.processor.BeanResolver; +import io.quarkus.arc.processor.BeanStream; +import io.quarkus.arc.processor.InjectionPointInfo; +import io.quarkus.arc.processor.ObserverInfo; +import io.quarkus.builder.item.SimpleBuildItem; + +abstract class RegisteredComponentsBuildItem extends SimpleBuildItem { + + private final Collection beans; + private final Collection injectionPoints; + private final Collection observers; + private final BeanResolver beanResolver; + + public RegisteredComponentsBuildItem(BeanDeployment beanDeployment) { + this.beans = beanDeployment.getBeans(); + this.injectionPoints = beanDeployment.getInjectionPoints(); + this.observers = beanDeployment.getObservers(); + this.beanResolver = beanDeployment.getBeanResolver(); + } + + /** + * @return the registered beans + */ + public Collection geBeans() { + return beans; + } + + /** + * @return the registered injection points + */ + public Collection getInjectionPoints() { + return injectionPoints; + } + + /** + * @return the registered observers + */ + public Collection getObservers() { + return observers; + } + + /** + * + * @return a convenient {@link 
Stream} wrapper that can be used to filter a set of beans + */ + public BeanStream beanStream() { + return new BeanStream(beans); + } + + /** + * The bean resolver can be used to apply the type-safe resolution rules. + * + * @return the bean resolver + */ + public BeanResolver getBeanResolver() { + return beanResolver; + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java index 8261d2e093c54..3fc42d9158fcd 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java @@ -1,6 +1,5 @@ package io.quarkus.arc.deployment; -import javax.enterprise.context.ApplicationScoped; import javax.enterprise.context.spi.Contextual; import javax.enterprise.context.spi.CreationalContext; @@ -8,7 +7,6 @@ import org.jboss.jandex.AnnotationTarget.Kind; import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.DotName; -import org.jboss.logging.Logger; import io.quarkus.arc.Arc; import io.quarkus.arc.ArcContainer; @@ -18,8 +16,6 @@ import io.quarkus.arc.deployment.ObserverRegistrationPhaseBuildItem.ObserverConfiguratorBuildItem; import io.quarkus.arc.impl.CreationalContextImpl; import io.quarkus.arc.processor.AnnotationStore; -import io.quarkus.arc.processor.Annotations; -import io.quarkus.arc.processor.AnnotationsTransformer; import io.quarkus.arc.processor.BeanInfo; import io.quarkus.arc.processor.BuildExtension; import io.quarkus.arc.processor.BuiltinScope; @@ -48,29 +44,14 @@ public class StartupBuildSteps { static final MethodDescriptor CONTEXTUAL_DESTROY = MethodDescriptor.ofMethod(Contextual.class, "destroy", void.class, Object.class, CreationalContext.class); - private static final Logger LOGGER = Logger.getLogger(StartupBuildSteps.class); - @BuildStep - AnnotationsTransformerBuildItem annotationTransformer(CustomScopeAnnotationsBuildItem customScopes) { - return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { - - @Override - public boolean appliesTo(org.jboss.jandex.AnnotationTarget.Kind kind) { - return kind == org.jboss.jandex.AnnotationTarget.Kind.CLASS; - } - - @Override - public void transform(TransformationContext context) { - if (context.isClass() && !customScopes.isScopeDeclaredOn(context.getTarget().asClass())) { - // Class with no built-in scope annotation but with @Scheduled method - if (Annotations.contains(context.getTarget().asClass().classAnnotations(), STARTUP_NAME)) { - LOGGER.debugf("Found @Startup on a class %s with no scope annotations - adding @ApplicationScoped", - context.getTarget()); - context.transform().add(ApplicationScoped.class).done(); - } - } - } - }); + AutoAddScopeBuildItem addScope(CustomScopeAnnotationsBuildItem customScopes) { + // Class with no built-in scope annotation but with @Startup method + return AutoAddScopeBuildItem.builder() + .defaultScope(BuiltinScope.APPLICATION) + .isAnnotatedWith(STARTUP_NAME) + .reason("Found classes containing @Startup annotation.") + .build(); } @BuildStep diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SynthesisFinishedBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SynthesisFinishedBuildItem.java new file mode 100644 index 0000000000000..a345fc282c4b2 --- /dev/null +++ 
b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SynthesisFinishedBuildItem.java @@ -0,0 +1,21 @@ +package io.quarkus.arc.deployment; + +import io.quarkus.arc.processor.BeanDeployment; + +/** + * Consumers of this build item can easily inspect all beans, observers and injection points registered in the + * application. Synthetic beans and observers are included. If interested in class-based components only you can use the + * {@link BeanDiscoveryFinishedBuildItem} instead. + *
+ * Additionally, the bean resolver can be used to apply the type-safe resolution rules, e.g. to find out whether there is a bean + * that would satisfy a certain combination of required type and qualifiers. + * + * @see BeanDiscoveryFinishedBuildItem + */ +public final class SynthesisFinishedBuildItem extends RegisteredComponentsBuildItem { + + public SynthesisFinishedBuildItem(BeanDeployment beanDeployment) { + super(beanDeployment); + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java index e9a9df065be05..26d614e751315 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java @@ -6,23 +6,39 @@ import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.DotName; -import io.quarkus.arc.processor.BeanConfigurator; import io.quarkus.arc.processor.BeanConfiguratorBase; +import io.quarkus.arc.processor.BeanRegistrar; import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.runtime.RuntimeValue; /** - * Makes it possible to register a synthetic bean whose instance can be easily produced through a recorder. + * Makes it possible to register a synthetic bean. + *
+ * Bean instances can be easily produced through a recorder and set via {@link ExtendedBeanConfigurator#supplier(Supplier)} and + * {@link ExtendedBeanConfigurator#runtimeValue(RuntimeValue)}. * - * @see BeanConfigurator + * @see ExtendedBeanConfigurator + * @see BeanRegistrar */ public final class SyntheticBeanBuildItem extends MultiBuildItem { + /** + * + * @param implClazz + * @return a new configurator instance + * @see ExtendedBeanConfigurator#done() + */ public static ExtendedBeanConfigurator configure(Class implClazz) { return configure(DotName.createSimple(implClazz.getName())); } + /** + * + * @param implClazz + * @return a new configurator instance + * @see ExtendedBeanConfigurator#done() + */ public static ExtendedBeanConfigurator configure(DotName implClazz) { return new ExtendedBeanConfigurator(implClazz).addType(implClazz); } @@ -33,22 +49,26 @@ public static ExtendedBeanConfigurator configure(DotName implClazz) { this.configurator = configurator; } - public ExtendedBeanConfigurator configurator() { + ExtendedBeanConfigurator configurator() { return configurator; } - public boolean isStaticInit() { + boolean isStaticInit() { return configurator.staticInit; } + boolean hasRecorderInstance() { + return configurator.supplier != null || configurator.runtimeValue != null; + } + /** * This construct is not thread-safe and should not be reused. */ public static class ExtendedBeanConfigurator extends BeanConfiguratorBase { - Supplier supplier; - RuntimeValue runtimeValue; - boolean staticInit; + private Supplier supplier; + private RuntimeValue runtimeValue; + private boolean staticInit; ExtendedBeanConfigurator(DotName implClazz) { super(implClazz); @@ -64,8 +84,9 @@ public SyntheticBeanBuildItem done() { if (supplier != null && runtimeValue != null) { throw new IllegalStateException("It is not possible to specify both - a supplier and a runtime value"); } - if (supplier == null && runtimeValue == null) { - throw new IllegalStateException("Either a supplier or a runtime value must be set"); + if (creatorConsumer == null && supplier == null && runtimeValue == null) { + throw new IllegalStateException( + "Synthetic bean does not provide a creation method, use ExtendedBeanConfigurator#creator(), ExtendedBeanConfigurator#supplier() or ExtendedBeanConfigurator#runtimeValue()"); } return new SyntheticBeanBuildItem(this); } @@ -81,22 +102,30 @@ public ExtendedBeanConfigurator runtimeValue(RuntimeValue runtimeValue) { } /** - * By default, synthetic beans are initialized during {@link ExecutionTime#STATIC_INIT}. It is possible to mark a - * synthetic bean to be initialized during {@link ExecutionTime#RUNTIME_INIT}. However, in such case a client that - * attempts to obtain such bean during {@link ExecutionTime#STATIC_INIT} will receive an exception. + * A synthetic bean whose instance is produced through a recorder is initialized during + * {@link ExecutionTime#STATIC_INIT} by default. + *
+ * It is possible to change this behavior and initialize the bean during {@link ExecutionTime#RUNTIME_INIT}. + * However, in such a case, a client that attempts to obtain the bean during {@link ExecutionTime#STATIC_INIT}, or before + * runtime-init synthetic beans are + * initialized, will receive an exception. + *
+ * {@link ExecutionTime#RUNTIME_INIT} build steps that access a runtime-init synthetic bean should consume the + * {@link SyntheticBeansRuntimeInitBuildItem}. * * @return self + * @see SyntheticBeansRuntimeInitBuildItem */ public ExtendedBeanConfigurator setRuntimeInit() { this.staticInit = false; return this; } - public DotName getImplClazz() { + DotName getImplClazz() { return implClazz; } - public Set getQualifiers() { + Set getQualifiers() { return qualifiers; } @@ -105,5 +134,13 @@ protected ExtendedBeanConfigurator self() { return this; } + Supplier getSupplier() { + return supplier; + } + + RuntimeValue getRuntimeValue() { + return runtimeValue; + } + } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansProcessor.java index 0498d1d218cb6..7161c6bf4cb55 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansProcessor.java @@ -11,6 +11,7 @@ import org.jboss.jandex.DotName; import io.quarkus.arc.deployment.BeanRegistrationPhaseBuildItem.BeanConfiguratorBuildItem; +import io.quarkus.arc.processor.BeanConfigurator; import io.quarkus.arc.runtime.ArcRecorder; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; @@ -34,8 +35,8 @@ void initStatic(ArcRecorder recorder, List syntheticBean Map> suppliersMap = new HashMap<>(); for (SyntheticBeanBuildItem bean : syntheticBeans) { - if (bean.isStaticInit()) { - initSyntheticBean(recorder, suppliersMap, beanRegistration, bean); + if (bean.hasRecorderInstance() && bean.isStaticInit()) { + configureSyntheticBean(recorder, suppliersMap, beanRegistration, bean); } } // Init the map of bean instances @@ -51,27 +52,40 @@ ServiceStartBuildItem initRuntime(ArcRecorder recorder, List> suppliersMap = new HashMap<>(); for (SyntheticBeanBuildItem bean : syntheticBeans) { - if (!bean.isStaticInit()) { - initSyntheticBean(recorder, suppliersMap, beanRegistration, bean); + if (bean.hasRecorderInstance() && !bean.isStaticInit()) { + configureSyntheticBean(recorder, suppliersMap, beanRegistration, bean); } } recorder.initRuntimeSupplierBeans(suppliersMap); return new ServiceStartBuildItem("runtime-bean-init"); } - private void initSyntheticBean(ArcRecorder recorder, Map> suppliersMap, + @BuildStep + void initRegular(List syntheticBeans, + BeanRegistrationPhaseBuildItem beanRegistration, BuildProducer configurators) { + + for (SyntheticBeanBuildItem bean : syntheticBeans) { + if (!bean.hasRecorderInstance()) { + configureSyntheticBean(null, null, beanRegistration, bean); + } + } + } + + private void configureSyntheticBean(ArcRecorder recorder, Map> suppliersMap, BeanRegistrationPhaseBuildItem beanRegistration, SyntheticBeanBuildItem bean) { DotName implClazz = bean.configurator().getImplClazz(); String name = createName(implClazz.toString(), bean.configurator().getQualifiers().toString()); - if (bean.configurator().runtimeValue != null) { - suppliersMap.put(name, recorder.createSupplier(bean.configurator().runtimeValue)); - } else { - suppliersMap.put(name, bean.configurator().supplier); + if (bean.configurator().getRuntimeValue() != null) { + suppliersMap.put(name, recorder.createSupplier(bean.configurator().getRuntimeValue())); + } else if (bean.configurator().getSupplier() != null) { + suppliersMap.put(name, 
bean.configurator().getSupplier()); + } + BeanConfigurator configurator = beanRegistration.getContext().configure(implClazz) + .read(bean.configurator()); + if (bean.hasRecorderInstance()) { + configurator.creator(creator(name, bean)); } - beanRegistration.getContext().configure(implClazz) - .read(bean.configurator()) - .creator(creator(name)) - .done(); + configurator.done(); } private String createName(String beanClass, String qualifiers) { @@ -79,7 +93,7 @@ private String createName(String beanClass, String qualifiers) { + HashUtil.sha1(qualifiers); } - private Consumer creator(String name) { + private Consumer creator(String name, SyntheticBeanBuildItem bean) { return new Consumer() { @Override public void accept(MethodCreator m) { @@ -90,7 +104,7 @@ public void accept(MethodCreator m) { m.load(name)); // Throw an exception if no supplier is found m.ifNull(supplier).trueBranch().throwException(CreationException.class, - "Synthetic bean instance not initialized yet: " + name); + createMessage(name, bean)); ResultHandle result = m.invokeInterfaceMethod( MethodDescriptor.ofMethod(Supplier.class, "get", Object.class), supplier); @@ -99,4 +113,18 @@ public void accept(MethodCreator m) { }; } + private String createMessage(String name, SyntheticBeanBuildItem bean) { + StringBuilder builder = new StringBuilder(); + builder.append("Synthetic bean instance for "); + builder.append(bean.configurator().getImplClazz()); + builder.append(" not initialized yet: "); + builder.append(name); + if (!bean.isStaticInit()) { + builder.append("\n\t- a synthetic bean initialized during RUNTIME_INIT must not be accessed during STATIC_INIT"); + builder.append( + "\n\t- RUNTIME_INIT build steps that require access to synthetic beans initialized during RUNTIME_INIT should consume the SyntheticBeansRuntimeInitBuildItem"); + } + return builder.toString(); + } + } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansRuntimeInitBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansRuntimeInitBuildItem.java index 9857f0ce84640..ac094c35ef22f 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansRuntimeInitBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeansRuntimeInitBuildItem.java @@ -4,6 +4,8 @@ /** * This build item should be consumed by build steps that require RUNTIME_INIT synthetic beans to be initialized. 
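To tie the SyntheticBeanBuildItem and SyntheticBeansRuntimeInitBuildItem changes above together, a hedged sketch (not taken from this PR) of a runtime-initialized synthetic bean; MyService and MyServiceRecorder are hypothetical, and the recorder is assumed to return a RuntimeValue from createService():

```java
package org.acme.deployment;

import javax.inject.Singleton;

import io.quarkus.arc.deployment.SyntheticBeanBuildItem;
import io.quarkus.arc.deployment.SyntheticBeansRuntimeInitBuildItem;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.Consume;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;

public class MyServiceProcessor {

    @BuildStep
    @Record(ExecutionTime.RUNTIME_INIT)
    SyntheticBeanBuildItem myServiceBean(MyServiceRecorder recorder) {
        return SyntheticBeanBuildItem.configure(MyService.class)
                .scope(Singleton.class)
                // the instance comes from the recorder, so it is only available at runtime
                .runtimeValue(recorder.createService())
                // initialize during RUNTIME_INIT instead of the default STATIC_INIT
                .setRuntimeInit()
                .done();
    }

    // Any RUNTIME_INIT step that needs the bean instance must wait for runtime-init
    // synthetic beans to be ready, hence the @Consume below.
    @BuildStep
    @Record(ExecutionTime.RUNTIME_INIT)
    @Consume(SyntheticBeansRuntimeInitBuildItem.class)
    void useMyService(MyServiceRecorder recorder) {
        recorder.lookupAndUseMyService();
    }
}
```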
+ * + * @see SyntheticBeanBuildItem */ public final class SyntheticBeansRuntimeInitBuildItem extends EmptyBuildItem { diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/TransformedAnnotationsBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/TransformedAnnotationsBuildItem.java index 10bc173dc5535..bb5f5574f968a 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/TransformedAnnotationsBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/TransformedAnnotationsBuildItem.java @@ -5,6 +5,7 @@ import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.DotName; import io.quarkus.arc.processor.AnnotationsTransformer; import io.quarkus.arc.processor.BeanDeployment; @@ -18,19 +19,23 @@ public final class TransformedAnnotationsBuildItem extends SimpleBuildItem implements Function> { - private final Function> fun; + private final BeanDeployment beanDeployment; TransformedAnnotationsBuildItem(BeanDeployment beanDeployment) { - this.fun = beanDeployment::getAnnotations; + this.beanDeployment = beanDeployment; } public Collection getAnnotations(AnnotationTarget target) { - return fun.apply(target); + return beanDeployment.getAnnotations(target); + } + + public AnnotationInstance getAnnotation(AnnotationTarget target, DotName name) { + return beanDeployment.getAnnotation(target, name); } @Override public Collection apply(AnnotationTarget target) { - return fun.apply(target); + return beanDeployment.getAnnotations(target); } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationsProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationsProcessor.java new file mode 100644 index 0000000000000..9ddc293634783 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationsProcessor.java @@ -0,0 +1,88 @@ +package io.quarkus.arc.deployment; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.DotName; +import org.jboss.jandex.IndexView; + +import io.quarkus.arc.deployment.ValidationPhaseBuildItem.ValidationErrorBuildItem; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.ApplicationIndexBuildItem; + +public class WrongAnnotationsProcessor { + + @BuildStep + void detect(ArcConfig config, ApplicationIndexBuildItem applicationIndex, + BuildProducer validationError) { + + if (!config.detectWrongAnnotations) { + return; + } + + IndexView index = applicationIndex.getIndex(); + List wrongAnnotations = new ArrayList<>(); + Function singletonFun = new Function() { + + @Override + public String apply(AnnotationInstance annotationInstance) { + return String.format("%s declared on %s, use @javax.inject.Singleton instead", + annotationInstance.toString(false), getTargetInfo(annotationInstance)); + } + }; + wrongAnnotations.add(new WrongAnnotation(DotName.createSimple("com.google.inject.Singleton"), singletonFun)); + wrongAnnotations.add(new WrongAnnotation(DotName.createSimple("javax.ejb.Singleton"), singletonFun)); + wrongAnnotations.add(new WrongAnnotation(DotName.createSimple("groovy.lang.Singleton"), 
singletonFun)); + wrongAnnotations.add(new WrongAnnotation(DotName.createSimple("jakarta.ejb.Singleton"), singletonFun)); + + Map wrongUsages = new HashMap<>(); + + for (WrongAnnotation wrongAnnotation : wrongAnnotations) { + for (AnnotationInstance annotationInstance : index.getAnnotations(wrongAnnotation.name)) { + wrongUsages.put(annotationInstance, wrongAnnotation.messageFun.apply(annotationInstance)); + } + } + + if (!wrongUsages.isEmpty()) { + for (Entry entry : wrongUsages.entrySet()) { + validationError.produce(new ValidationErrorBuildItem( + new IllegalStateException(entry.getValue()))); + } + } + } + + private static class WrongAnnotation { + + final DotName name; + final Function messageFun; + + WrongAnnotation(DotName name, Function messageFun) { + this.name = name; + this.messageFun = messageFun; + } + + } + + private static String getTargetInfo(AnnotationInstance annotationInstance) { + AnnotationTarget target = annotationInstance.target(); + switch (target.kind()) { + case FIELD: + return target.asField().declaringClass().toString() + "." + + target.asField().name(); + case METHOD: + return target.asMethod().declaringClass().toString() + + "." + + target.asMethod().name() + "()"; + default: + return target.toString(); + } + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ClassConfigPropertiesUtil.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ClassConfigPropertiesUtil.java index 64bdaa58cac56..84212e48284a3 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ClassConfigPropertiesUtil.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ClassConfigPropertiesUtil.java @@ -3,6 +3,7 @@ import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.createReadMandatoryValueAndConvertIfNeeded; import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.createReadOptionalValueAndConvertIfNeeded; import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.determineSingleGenericType; +import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.registerImplicitConverter; import java.lang.reflect.Modifier; import java.util.ArrayList; @@ -16,9 +17,11 @@ import javax.enterprise.event.Observes; import javax.enterprise.inject.Default; import javax.enterprise.inject.Produces; +import javax.enterprise.inject.spi.DeploymentException; import javax.inject.Inject; import org.eclipse.microprofile.config.Config; +import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.ClassInfo; @@ -32,8 +35,11 @@ import io.quarkus.arc.config.ConfigProperties; import io.quarkus.arc.deployment.ConfigPropertyBuildItem; import io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.ReadOptionalResponse; +import io.quarkus.deployment.Capabilities; +import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.bean.JavaBeanUtil; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveMethodBuildItem; import io.quarkus.gizmo.BranchResult; import io.quarkus.gizmo.BytecodeCreator; @@ -55,7 +61,28 @@ final class ClassConfigPropertiesUtil { private static final String HIBERNATE_VALIDATOR_IMPL_CLASS = 
"org.hibernate.validator.HibernateValidator"; private static final String CONSTRAINT_VIOLATION_EXCEPTION_CLASS = "javax.validation.ConstraintViolationException"; - private ClassConfigPropertiesUtil() { + private final IndexView applicationIndex; + private final YamlListObjectHandler yamlListObjectHandler; + private final ClassCreator producerClassCreator; + private final Capabilities capabilities; + private final BuildProducer reflectiveClasses; + private final BuildProducer reflectiveMethods; + private final BuildProducer configProperties; + + ClassConfigPropertiesUtil(IndexView applicationIndex, YamlListObjectHandler yamlListObjectHandler, + ClassCreator producerClassCreator, + Capabilities capabilities, + BuildProducer reflectiveClasses, + BuildProducer reflectiveMethods, + BuildProducer configProperties) { + + this.applicationIndex = applicationIndex; + this.yamlListObjectHandler = yamlListObjectHandler; + this.producerClassCreator = producerClassCreator; + this.capabilities = capabilities; + this.reflectiveClasses = reflectiveClasses; + this.reflectiveMethods = reflectiveMethods; + this.configProperties = configProperties; } /** @@ -122,12 +149,10 @@ static void generateStartupObserverThatInjectsConfigClass(ClassOutput classOutpu /** * @return true if the configuration class needs validation */ - static boolean addProducerMethodForClassConfigProperties(ClassLoader classLoader, ClassInfo configPropertiesClassInfo, - ClassCreator producerClassCreator, String prefixStr, ConfigProperties.NamingStrategy namingStrategy, + boolean addProducerMethodForClassConfigProperties(ClassLoader classLoader, ClassInfo configPropertiesClassInfo, + String prefixStr, ConfigProperties.NamingStrategy namingStrategy, boolean failOnMismatchingMember, - boolean needsQualifier, IndexView applicationIndex, - BuildProducer reflectiveMethods, - BuildProducer configProperties) { + boolean needsQualifier) { if (!configPropertiesClassInfo.hasNoArgsConstructor()) { throw new IllegalArgumentException( @@ -180,7 +205,7 @@ static boolean addProducerMethodForClassConfigProperties(ClassLoader classLoader } ResultHandle configObject = populateConfigObject(classLoader, configPropertiesClassInfo, prefixStr, namingStrategy, - failOnMismatchingMember, methodCreator, applicationIndex, reflectiveMethods, configProperties); + failOnMismatchingMember, methodCreator); if (needsValidation) { createValidationCodePath(methodCreator, configObject, prefixStr); @@ -202,18 +227,15 @@ private static boolean needsValidation() { private static boolean isHibernateValidatorInClasspath() { try { - Class.forName(HIBERNATE_VALIDATOR_IMPL_CLASS); + Class.forName(HIBERNATE_VALIDATOR_IMPL_CLASS, false, Thread.currentThread().getContextClassLoader()); return true; } catch (ClassNotFoundException e) { return false; } } - private static ResultHandle populateConfigObject(ClassLoader classLoader, ClassInfo configClassInfo, String prefixStr, - ConfigProperties.NamingStrategy namingStrategy, boolean failOnMismatchingMember, MethodCreator methodCreator, - IndexView applicationIndex, - BuildProducer reflectiveMethods, - BuildProducer configProperties) { + private ResultHandle populateConfigObject(ClassLoader classLoader, ClassInfo configClassInfo, String prefixStr, + ConfigProperties.NamingStrategy namingStrategy, boolean failOnMismatchingMember, MethodCreator methodCreator) { String configObjectClassStr = configClassInfo.name().toString(); ResultHandle configObject = methodCreator.newInstance(MethodDescriptor.ofConstructor(configObjectClassStr)); @@ -238,10 
+260,15 @@ private static ResultHandle populateConfigObject(ClassLoader classLoader, ClassI if (field.hasAnnotation(DotNames.CONFIG_IGNORE)) { continue; } - if (field.hasAnnotation(DotNames.CONFIG_PROPERTY)) { - LOGGER.warn( - "'@ConfigProperty' is ignored when added to a field of a class annotated with '@ConfigProperties'. Offending field is '" - + field.name() + "' of class '" + field.declaringClass().toString() + "'"); + AnnotationInstance configPropertyAnnotation = field.annotation(DotNames.CONFIG_PROPERTY); + if (configPropertyAnnotation != null) { + AnnotationValue configPropertyDefaultValue = configPropertyAnnotation.value("defaultValue"); + if ((configPropertyDefaultValue != null) + && !configPropertyDefaultValue.asString().equals(ConfigProperty.UNCONFIGURED_VALUE)) { + LOGGER.warn( + "'defaultValue' of '@ConfigProperty' is ignored when added to a field of a class annotated with '@ConfigProperties'. Offending field is '" + + field.name() + "' of class '" + field.declaringClass().toString() + "'"); + } } boolean useFieldAccess = false; @@ -309,8 +336,7 @@ private static ResultHandle populateConfigObject(ClassLoader classLoader, ClassI ResultHandle nestedConfigObject = populateConfigObject(classLoader, fieldTypeClassInfo, getFullConfigName(prefixStr, namingStrategy, field), namingStrategy, failOnMismatchingMember, - methodCreator, - applicationIndex, reflectiveMethods, configProperties); + methodCreator); createWriteValue(methodCreator, configObject, field, setter, useFieldAccess, nestedConfigObject); } } else { @@ -321,6 +347,7 @@ private static ResultHandle populateConfigObject(ClassLoader classLoader, ClassI // config.getOptionalValue if (genericType.kind() != Type.Kind.PARAMETERIZED_TYPE) { + registerImplicitConverter(genericType, reflectiveClasses); ResultHandle setterValue = methodCreator.invokeInterfaceMethod( MethodDescriptor.ofMethod(Config.class, "getOptionalValue", Optional.class, String.class, Class.class), @@ -344,7 +371,18 @@ private static ResultHandle populateConfigObject(ClassLoader classLoader, ClassI readOptionalResponse.getIsPresentFalse().invokeStaticMethod( MethodDescriptor.ofMethod(Optional.class, "empty", Optional.class))); } + } else if (ConfigPropertiesUtil.isListOfObject(fieldType)) { + if (!capabilities.isPresent(Capability.CONFIG_YAML)) { + throw new DeploymentException( + "Support for List of objects in classes annotated with '@ConfigProperties' is only possible via the 'quarkus-config-yaml' extension. 
Offending field is '" + + field.name() + "' of class '" + field.declaringClass().name().toString()); + } + ResultHandle setterValue = yamlListObjectHandler.handle(new YamlListObjectHandler.FieldMember(field), + methodCreator, mpConfig, + getEffectiveConfigName(namingStrategy, field), fullConfigName); + createWriteValue(methodCreator, configObject, field, setter, useFieldAccess, setterValue); } else { + registerImplicitConverter(fieldType, reflectiveClasses); populateTypicalProperty(methodCreator, configObject, configPropertyBuildItemCandidates, currentClassInHierarchy, field, useFieldAccess, fieldType, setter, mpConfig, fullConfigName); @@ -414,12 +452,26 @@ private static void populateTypicalProperty(MethodCreator methodCreator, ResultH createWriteValue(methodCreator, configObject, field, setter, useFieldAccess, setterValue); } - configPropertyBuildItemCandidates - .add(new ConfigPropertyBuildItemCandidate(field.name(), fullConfigName, fieldType)); + if (field.type().kind() != Type.Kind.PRIMITIVE) { // the JVM assigns primitive types a default even though it doesn't show up in the bytecode + configPropertyBuildItemCandidates + .add(new ConfigPropertyBuildItemCandidate(field.name(), fullConfigName, fieldType)); + } } private static String getFullConfigName(String prefixStr, ConfigProperties.NamingStrategy namingStrategy, FieldInfo field) { - return prefixStr + "." + namingStrategy.getName(field.name()); + return prefixStr + "." + getEffectiveConfigName(namingStrategy, field); + } + + private static String getEffectiveConfigName(ConfigProperties.NamingStrategy namingStrategy, FieldInfo field) { + String nameToUse = field.name(); + AnnotationInstance configPropertyAnnotation = field.annotation(DotNames.CONFIG_PROPERTY); + if (configPropertyAnnotation != null) { + AnnotationValue configPropertyNameValue = configPropertyAnnotation.value("name"); + if ((configPropertyNameValue != null) && !configPropertyNameValue.asString().isEmpty()) { + nameToUse = configPropertyNameValue.asString(); + } + } + return namingStrategy.getName(nameToUse); } private static void createWriteValue(BytecodeCreator bytecodeCreator, ResultHandle configObject, FieldInfo field, diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesBuildStep.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesBuildStep.java index 96db87f92c62c..88ff89a1acabf 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesBuildStep.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesBuildStep.java @@ -1,8 +1,5 @@ package io.quarkus.arc.deployment.configproperties; -import static io.quarkus.arc.deployment.configproperties.InterfaceConfigPropertiesUtil.addProducerMethodForInterfaceConfigProperties; -import static io.quarkus.arc.deployment.configproperties.InterfaceConfigPropertiesUtil.generateImplementationForInterfaceConfigProperties; - import java.lang.reflect.Modifier; import java.util.HashMap; import java.util.HashSet; @@ -25,12 +22,14 @@ import io.quarkus.arc.deployment.GeneratedBeanBuildItem; import io.quarkus.arc.deployment.GeneratedBeanGizmoAdaptor; import io.quarkus.arc.deployment.configproperties.InterfaceConfigPropertiesUtil.GeneratedClass; +import io.quarkus.deployment.Capabilities; import io.quarkus.deployment.GeneratedClassGizmoAdaptor; import io.quarkus.deployment.annotations.BuildProducer; import 
io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.GeneratedClassBuildItem; import io.quarkus.deployment.builditem.RunTimeConfigurationDefaultBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveMethodBuildItem; import io.quarkus.gizmo.ClassCreator; import io.quarkus.gizmo.ClassOutput; @@ -105,10 +104,12 @@ private String getPrefix(AnnotationInstance annotationInstance) { @BuildStep void setup(CombinedIndexBuildItem combinedIndex, List configPropertiesMetadataList, + Capabilities capabilities, BuildProducer generatedClasses, BuildProducer generatedBeans, BuildProducer defaultConfigValues, BuildProducer reflectiveMethods, + BuildProducer reflectiveClasses, BuildProducer configProperties) { if (configPropertiesMetadataList.isEmpty()) { return; @@ -128,6 +129,14 @@ void setup(CombinedIndexBuildItem combinedIndex, producerClassCreator.addAnnotation(Singleton.class); Set configClassesThatNeedValidation = new HashSet<>(configPropertiesMetadataList.size()); + IndexView index = combinedIndex.getIndex(); + YamlListObjectHandler yamlListObjectHandler = new YamlListObjectHandler(nonBeansClassOutput, index, reflectiveClasses); + ClassConfigPropertiesUtil classConfigPropertiesUtil = new ClassConfigPropertiesUtil(index, + yamlListObjectHandler, producerClassCreator, capabilities, reflectiveClasses, reflectiveMethods, + configProperties); + InterfaceConfigPropertiesUtil interfaceConfigPropertiesUtil = new InterfaceConfigPropertiesUtil(index, + yamlListObjectHandler, nonBeansClassOutput, producerClassCreator, capabilities, defaultConfigValues, + configProperties, reflectiveClasses); for (ConfigPropertiesMetadataBuildItem configPropertiesMetadata : configPropertiesMetadataList) { ClassInfo classInfo = configPropertiesMetadata.getClassInfo(); @@ -139,14 +148,13 @@ void setup(CombinedIndexBuildItem combinedIndex, */ Map interfaceToGeneratedClass = new HashMap<>(); - generateImplementationForInterfaceConfigProperties( - classInfo, nonBeansClassOutput, combinedIndex.getIndex(), configPropertiesMetadata.getPrefix(), - configPropertiesMetadata.getNamingStrategy(), defaultConfigValues, configProperties, + interfaceConfigPropertiesUtil.generateImplementationForInterfaceConfigProperties( + classInfo, configPropertiesMetadata.getPrefix(), + configPropertiesMetadata.getNamingStrategy(), interfaceToGeneratedClass); for (Map.Entry entry : interfaceToGeneratedClass.entrySet()) { - addProducerMethodForInterfaceConfigProperties(entry.getKey(), + interfaceConfigPropertiesUtil.addProducerMethodForInterfaceConfigProperties(entry.getKey(), configPropertiesMetadata.getPrefix(), configPropertiesMetadata.isNeedsQualifier(), - producerClassCreator, entry.getValue()); } } else { @@ -154,11 +162,10 @@ void setup(CombinedIndexBuildItem combinedIndex, * In this case the producer method contains all the logic to instantiate the config class * and call setters for value obtained from MP Config */ - boolean needsValidation = ClassConfigPropertiesUtil.addProducerMethodForClassConfigProperties( - Thread.currentThread().getContextClassLoader(), classInfo, producerClassCreator, + boolean needsValidation = classConfigPropertiesUtil.addProducerMethodForClassConfigProperties( + Thread.currentThread().getContextClassLoader(), classInfo, configPropertiesMetadata.getPrefix(), configPropertiesMetadata.getNamingStrategy(), - 
configPropertiesMetadata.isFailOnMismatchingMember(), configPropertiesMetadata.isNeedsQualifier(), - combinedIndex.getIndex(), reflectiveMethods, configProperties); + configPropertiesMetadata.isFailOnMismatchingMember(), configPropertiesMetadata.isNeedsQualifier()); if (needsValidation) { configClassesThatNeedValidation.add(classInfo.name()); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesMetadataBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesMetadataBuildItem.java index 3d5bd1f82c28a..8e3e301193e16 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesMetadataBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesMetadataBuildItem.java @@ -56,7 +56,7 @@ private String sanitisePrefix(String prefix) { } private boolean isPrefixUnset(String prefix) { - return prefix == null || "".equals(prefix.trim()) || ConfigProperties.UNSET_PREFIX.equals(prefix.trim()); + return prefix == null || prefix.trim().isEmpty() || ConfigProperties.UNSET_PREFIX.equals(prefix.trim()); } private String getPrefixFromClassName(DotName className) { diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesUtil.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesUtil.java index dc90cebe401f5..48de13b00794b 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesUtil.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/ConfigPropertiesUtil.java @@ -9,6 +9,9 @@ import org.jboss.jandex.ParameterizedType; import org.jboss.jandex.Type; +import io.quarkus.arc.deployment.ConfigBuildStep; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.gizmo.BranchResult; import io.quarkus.gizmo.BytecodeCreator; import io.quarkus.gizmo.MethodDescriptor; @@ -107,6 +110,20 @@ static ReadOptionalResponse createReadOptionalValueAndConvertIfNeeded(String pro return new ReadOptionalResponse(value, isPresentTrue, isPresentBranch.falseBranch()); } + public static boolean isListOfObject(Type type) { + if (type.kind() != Type.Kind.PARAMETERIZED_TYPE) { + return false; + } + ParameterizedType parameterizedType = (ParameterizedType) type; + if (!DotNames.LIST.equals(parameterizedType.name())) { + return false; + } + if (parameterizedType.arguments().size() != 1) { + return false; + } + return !parameterizedType.arguments().get(0).name().toString().startsWith("java"); + } + private static boolean isCollection(final Type resultType) { return DotNames.COLLECTION.equals(resultType.name()) || DotNames.LIST.equals(resultType.name()) || @@ -128,6 +145,15 @@ static Type determineSingleGenericType(Type type, DotName declaringClass) { return type.asParameterizedType().arguments().get(0); } + static void registerImplicitConverter(Type type, BuildProducer reflectiveClasses) { + // We need to register for reflection in case an implicit converter is required. 
+ if (!ConfigBuildStep.isHandledByProducers(type)) { + if (type.kind() != Type.Kind.ARRAY) { + reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, type.name().toString())); + } + } + } + static class ReadOptionalResponse { private final ResultHandle value; //this is only valid within 'isPresentTrue' private final BytecodeCreator isPresentTrue; diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/InterfaceConfigPropertiesUtil.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/InterfaceConfigPropertiesUtil.java index bd919b970863c..0efa06befc810 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/InterfaceConfigPropertiesUtil.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/InterfaceConfigPropertiesUtil.java @@ -3,6 +3,7 @@ import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.createReadMandatoryValueAndConvertIfNeeded; import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.createReadOptionalValueAndConvertIfNeeded; import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.determineSingleGenericType; +import static io.quarkus.arc.deployment.configproperties.ConfigPropertiesUtil.registerImplicitConverter; import static io.quarkus.gizmo.MethodDescriptor.ofMethod; import java.lang.annotation.Annotation; @@ -15,6 +16,7 @@ import javax.enterprise.inject.Default; import javax.enterprise.inject.Produces; +import javax.enterprise.inject.spi.DeploymentException; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.inject.ConfigProperty; @@ -32,9 +34,12 @@ import io.quarkus.arc.Unremovable; import io.quarkus.arc.config.ConfigProperties; import io.quarkus.arc.deployment.ConfigPropertyBuildItem; +import io.quarkus.deployment.Capabilities; +import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.bean.JavaBeanUtil; import io.quarkus.deployment.builditem.RunTimeConfigurationDefaultBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.gizmo.ClassCreator; import io.quarkus.gizmo.ClassOutput; import io.quarkus.gizmo.FieldDescriptor; @@ -45,7 +50,28 @@ final class InterfaceConfigPropertiesUtil { - private InterfaceConfigPropertiesUtil() { + private final IndexView index; + private final YamlListObjectHandler yamlListObjectHandler; + private final ClassOutput classOutput; + private final ClassCreator classCreator; + private final Capabilities capabilities; + private final BuildProducer defaultConfigValues; + private final BuildProducer configProperties; + private final BuildProducer reflectiveClasses; + + InterfaceConfigPropertiesUtil(IndexView index, YamlListObjectHandler yamlListObjectHandler, ClassOutput classOutput, + ClassCreator classCreator, + Capabilities capabilities, BuildProducer defaultConfigValues, + BuildProducer configProperties, + BuildProducer reflectiveClasses) { + this.index = index; + this.yamlListObjectHandler = yamlListObjectHandler; + this.classOutput = classOutput; + this.classCreator = classCreator; + this.capabilities = capabilities; + this.defaultConfigValues = defaultConfigValues; + this.configProperties = configProperties; + this.reflectiveClasses = reflectiveClasses; } /** @@ -58,8 +84,7 @@ private InterfaceConfigPropertiesUtil() { * } * */ - static void 
addProducerMethodForInterfaceConfigProperties(DotName interfaceName, String prefix, boolean needsQualifier, - ClassCreator classCreator, + void addProducerMethodForInterfaceConfigProperties(DotName interfaceName, String prefix, boolean needsQualifier, GeneratedClass generatedClass) { String methodName = "produce" + interfaceName.withoutPackagePrefix(); if (needsQualifier) { @@ -84,21 +109,17 @@ static void addProducerMethodForInterfaceConfigProperties(DotName interfaceName, } } - static void generateImplementationForInterfaceConfigProperties(ClassInfo originalInterface, ClassOutput classOutput, - IndexView index, String prefixStr, ConfigProperties.NamingStrategy namingStrategy, - BuildProducer defaultConfigValues, - BuildProducer configProperties, + void generateImplementationForInterfaceConfigProperties(ClassInfo originalInterface, + String prefixStr, ConfigProperties.NamingStrategy namingStrategy, Map interfaceToGeneratedClass) { - generateImplementationForInterfaceConfigPropertiesRec(originalInterface, originalInterface, classOutput, index, - prefixStr, namingStrategy, defaultConfigValues, configProperties, interfaceToGeneratedClass); + generateImplementationForInterfaceConfigPropertiesRec(originalInterface, originalInterface, + prefixStr, namingStrategy, interfaceToGeneratedClass); } - private static void generateImplementationForInterfaceConfigPropertiesRec(ClassInfo originalInterface, - ClassInfo currentInterface, ClassOutput classOutput, - IndexView index, String prefixStr, ConfigProperties.NamingStrategy namingStrategy, - BuildProducer defaultConfigValues, - BuildProducer configProperties, + private String generateImplementationForInterfaceConfigPropertiesRec(ClassInfo originalInterface, + ClassInfo currentInterface, + String prefixStr, ConfigProperties.NamingStrategy namingStrategy, Map interfaceToGeneratedClass) { Set allInterfaces = new HashSet<>(); @@ -106,9 +127,11 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI collectInterfacesRec(currentInterface, index, allInterfaces); String generatedClassName = createName(currentInterface.name(), prefixStr); - // the generated class needs to be unremovable if it's not top-level interface since it will only be obtains via Arc.container().instance() in generated code - interfaceToGeneratedClass.put(currentInterface.name(), - new GeneratedClass(generatedClassName, !originalInterface.name().equals(currentInterface.name()))); + + // we only need to generate CDI producers for the top-level interface, not the sub-interfaces + if (originalInterface.name().equals(currentInterface.name())) { + interfaceToGeneratedClass.put(currentInterface.name(), new GeneratedClass(generatedClassName, true)); + } try (ClassCreator interfaceImplClassCreator = ClassCreator.builder().classOutput(classOutput) .interfaces(currentInterface.name().toString()).className(generatedClassName) @@ -133,7 +156,9 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI List methods = classInfo.methods(); for (MethodInfo method : methods) { Type returnType = method.returnType(); - if (isDefault(method.flags())) { // don't do anything with default methods + short methodModifiers = method.flags(); + if (isDefault(methodModifiers) || Modifier.isStatic(methodModifiers) + || Modifier.isPrivate(methodModifiers)) { continue; } if (!method.parameters().isEmpty()) { @@ -153,25 +178,25 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI if ((returnType.kind() == Type.Kind.CLASS)) { ClassInfo 
returnTypeClassInfo = index.getClassByName(returnType.name()); if ((returnTypeClassInfo != null) && Modifier.isInterface(returnTypeClassInfo.flags())) { - // when the return type is an interface, - // 1) we generate an implementation - // 2) retrieve the implementation from Arc - - generateImplementationForInterfaceConfigPropertiesRec(originalInterface, returnTypeClassInfo, - classOutput, - index, fullConfigName, namingStrategy, defaultConfigValues, configProperties, - interfaceToGeneratedClass); + String generatedSubInterfaceImp = generateImplementationForInterfaceConfigPropertiesRec( + originalInterface, returnTypeClassInfo, + fullConfigName, namingStrategy, interfaceToGeneratedClass); ResultHandle arcContainer = methodCreator .invokeStaticMethod( MethodDescriptor.ofMethod(Arc.class, "container", ArcContainer.class)); - ResultHandle instanceHandle = methodCreator.invokeInterfaceMethod( + ResultHandle configInstanceHandle = methodCreator.invokeInterfaceMethod( ofMethod(ArcContainer.class, "instance", InstanceHandle.class, Class.class, Annotation[].class), - arcContainer, methodCreator.loadClass(returnTypeClassInfo.name().toString()), + arcContainer, methodCreator.loadClass(Config.class), methodCreator.newArray(Annotation.class, 0)); - methodCreator.returnValue(methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(InstanceHandle.class, "get", Object.class), instanceHandle)); + ResultHandle config = methodCreator.invokeInterfaceMethod( + ofMethod(InstanceHandle.class, "get", Object.class), configInstanceHandle); + + ResultHandle interImpl = methodCreator + .newInstance(MethodDescriptor.ofConstructor(generatedSubInterfaceImp, + Config.class.getName()), config); + methodCreator.returnValue(interImpl); continue; } } @@ -196,6 +221,7 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI method.declaringClass().name()); if (genericType.kind() != Type.Kind.PARAMETERIZED_TYPE) { + registerImplicitConverter(genericType, reflectiveClasses); ResultHandle result = methodCreator.invokeInterfaceMethod( MethodDescriptor.ofMethod(Config.class, "getOptionalValue", Optional.class, String.class, @@ -220,6 +246,17 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI MethodDescriptor.ofMethod(Optional.class, "of", Optional.class, Object.class), readOptionalResponse.getValue())); } + } else if (ConfigPropertiesUtil.isListOfObject(method.returnType())) { + if (!capabilities.isPresent(Capability.CONFIG_YAML)) { + throw new DeploymentException( + "Support for List of objects in classes annotated with '@ConfigProperties' is only possible via the 'quarkus-config-yaml' extension. 
Offending method is '" + + method.name() + "' of interface '" + + method.declaringClass().name().toString()); + } + ResultHandle value = yamlListObjectHandler.handle( + new YamlListObjectHandler.MethodReturnTypeMember(method), methodCreator, config, + nameAndDefaultValue.getName(), fullConfigName); + methodCreator.returnValue(value); } else { if (defaultValueStr != null) { /* @@ -231,6 +268,7 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI .produce(new RunTimeConfigurationDefaultBuildItem(fullConfigName, defaultValueStr)); } // use config.getValue to obtain and return the result, converting it to a collection if needed + registerImplicitConverter(returnType, reflectiveClasses); ResultHandle value = createReadMandatoryValueAndConvertIfNeeded( fullConfigName, returnType, method.declaringClass().name(), methodCreator, config); @@ -244,6 +282,8 @@ private static void generateImplementationForInterfaceConfigPropertiesRec(ClassI } } } + + return generatedClassName; } private static void collectInterfacesRec(ClassInfo current, IndexView index, Set result) { diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/YamlListObjectHandler.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/YamlListObjectHandler.java new file mode 100644 index 0000000000000..8bfc071cc6f92 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/configproperties/YamlListObjectHandler.java @@ -0,0 +1,246 @@ +package io.quarkus.arc.deployment.configproperties; + +import java.lang.reflect.Modifier; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eclipse.microprofile.config.spi.Converter; +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.AnnotationValue; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.FieldInfo; +import org.jboss.jandex.IndexView; +import org.jboss.jandex.MethodInfo; +import org.jboss.jandex.ParameterizedType; +import org.jboss.jandex.Type; + +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.bean.JavaBeanUtil; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.gizmo.ClassCreator; +import io.quarkus.gizmo.ClassOutput; +import io.quarkus.gizmo.FieldDescriptor; +import io.quarkus.gizmo.MethodCreator; +import io.quarkus.gizmo.MethodDescriptor; +import io.quarkus.gizmo.ResultHandle; +import io.smallrye.config.SmallRyeConfig; + +/** + * Class used to handle all the plumbing needed to support fields with types like {@code List<SomeClass>}, + * whose values can only be provided in YAML. + * + * The basic idea for handling these fields is to convert the string value of the field (which SmallRye Config + * populates with the "serialized" value of the field) using SnakeYAML. + * To achieve that, various intermediate classes and YAML configuration need to be generated. 
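+ *
+ * As an illustrative sketch only (the {@code Endpoint} type and the property names below are hypothetical,
+ * not part of this change): a config class with a field {@code List<Endpoint> endpoints} under prefix
+ * {@code app} would have its value supplied by YAML along the lines of
+ *
+ *   app:
+ *     endpoints:
+ *       - host: localhost
+ *         port: 8080
+ *
+ * quarkus-config-yaml exposes that list to SmallRye Config as a single "serialized" string, and the
+ * converter generated below uses SnakeYAML to deserialize that string into the generated wrapper class,
+ * from which the actual {@code List} is then read.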
+ */ +class YamlListObjectHandler { + + private static final String ABSTRACT_YAML_CONVERTER_CNAME = "io.quarkus.config.yaml.runtime.AbstractYamlObjectConverter"; + + private final ClassOutput classOutput; + private final IndexView index; + private final BuildProducer reflectiveClasses; + + public YamlListObjectHandler(ClassOutput classOutput, IndexView index, + BuildProducer reflectiveClasses) { + this.classOutput = classOutput; + this.index = index; + this.reflectiveClasses = reflectiveClasses; + } + + public ResultHandle handle(Member member, MethodCreator configPopulator, ResultHandle configObject, + String configName, String fullConfigName) { + ClassInfo classInfo = validateType(member.type()); + validateClass(classInfo, member); + // these need to be registered for reflection because SnakeYaml used reflection to instantiate them + reflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, classInfo.name().toString())); + + String wrapperClassName = classInfo.name().toString() + "_GeneratedListWrapper_" + configName; + + // generate a class that has a List field and getters and setter which have the proper generic type + // this way SnakeYaml can properly populate the field + MethodDescriptor getterDesc; + try (ClassCreator cc = ClassCreator.builder().classOutput(classOutput) + .className(wrapperClassName) + .build()) { + FieldDescriptor fieldDesc = cc.getFieldCreator(configName, List.class).setModifiers(Modifier.PRIVATE) + .getFieldDescriptor(); + + MethodCreator getter = cc.getMethodCreator(JavaBeanUtil.getGetterName(configName, classInfo.name()), List.class); + getter.setSignature(String.format("()Ljava/util/List;", forSignature(classInfo))); + getterDesc = getter.getMethodDescriptor(); + getter.returnValue(getter.readInstanceField(fieldDesc, getter.getThis())); + + MethodCreator setter = cc.getMethodCreator(JavaBeanUtil.getSetterName(configName), void.class, List.class); + setter.setSignature(String.format("(Ljava/util/List;)V", forSignature(classInfo))); + setter.writeInstanceField(fieldDesc, setter.getThis(), setter.getMethodParam(0)); + setter.returnValue(null); + } + // we always generate getters and setters, so reflection is only needed on the methods + reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, wrapperClassName)); + + // generate an MP-Config converter which looks something like this + // public class GeneratedInputsConverter extends io.quarkus.config.yaml.runtime.AbstractYamlObjectConverter { + // + // @Override + // Map getFieldNameMap() { + // Map result = new HashMap<>(); + // result.put("some_field", "someField"); + // return result; + // } + // + // @Override + // Class getClazz() { + // return SomeClass_GeneratedListWrapper_fieldName.class; + // } + // } + String wrapperConverterClassName = wrapperClassName + "_Converter"; + try (ClassCreator cc = ClassCreator.builder().classOutput(classOutput) + .className(wrapperConverterClassName) + .superClass(ABSTRACT_YAML_CONVERTER_CNAME) + .signature( + String.format("L%s;", ABSTRACT_YAML_CONVERTER_CNAME.replace('.', '/'), + wrapperClassName.replace('.', '/'))) + .build()) { + MethodCreator getClazz = cc.getMethodCreator("getClazz", Class.class).setModifiers(Modifier.PROTECTED); + getClazz.returnValue(getClazz.loadClass(wrapperClassName)); + + // generate the getFieldNameMap method by searching for fields annotated with @ConfigProperty + List configPropertyInstances = classInfo.annotations().get(DotNames.CONFIG_PROPERTY); + Map fieldNameMap = new HashMap<>(); + if (configPropertyInstances != 
null) { + for (AnnotationInstance instance : configPropertyInstances) { + if (instance.target().kind() != AnnotationTarget.Kind.FIELD) { + continue; + } + AnnotationValue nameValue = instance.value("name"); + if (nameValue != null) { + String nameValueStr = nameValue.asString(); + if ((nameValueStr != null) && !nameValueStr.isEmpty()) { + String annotatedFieldName = instance.target().asField().name(); + fieldNameMap.put(nameValueStr, annotatedFieldName); + } + } + } + } + if (!fieldNameMap.isEmpty()) { + MethodCreator getFieldNameMap = cc.getMethodCreator("getFieldNameMap", Map.class); + ResultHandle resultHandle = getFieldNameMap.newInstance(MethodDescriptor.ofConstructor(HashMap.class)); + for (Map.Entry entry : fieldNameMap.entrySet()) { + getFieldNameMap.invokeVirtualMethod( + MethodDescriptor.ofMethod(HashMap.class, "put", Object.class, Object.class, Object.class), + resultHandle, getFieldNameMap.load(entry.getKey()), getFieldNameMap.load(entry.getValue())); + } + getFieldNameMap.returnValue(resultHandle); + } + } + + // use the generated converter to convert the string value into the wrapper + ResultHandle smallryeConfig = configPopulator.checkCast(configObject, SmallRyeConfig.class); + ResultHandle getValueHandle = configPopulator.invokeVirtualMethod( + MethodDescriptor.ofMethod(SmallRyeConfig.class, "getValue", Object.class, String.class, Converter.class), + smallryeConfig, + configPopulator.load(fullConfigName), + configPopulator.newInstance(MethodDescriptor.ofConstructor(wrapperConverterClassName))); + ResultHandle wrapperHandle = configPopulator.checkCast(getValueHandle, wrapperClassName); + //pull the actual value out of the wrapper + return configPopulator.invokeVirtualMethod(getterDesc, wrapperHandle); + } + + private void validateClass(ClassInfo classInfo, Member member) { + if (Modifier.isInterface(classInfo.flags())) { + throw new IllegalArgumentException( + "The use of interfaces as the generic type of Lists fields / methods is not allowed. 
Offending field is '" + + member.name() + "' of class '" + member.declaringClass().name().toString() + "'"); + } + if (!classInfo.hasNoArgsConstructor()) { + throw new IllegalArgumentException( + String.format("Class '%s' which is used as %s in class '%s' must be have a no-args constructor", classInfo, + member.phraseUsage(), member.declaringClass().name().toString())); + } + if (!Modifier.isPublic(classInfo.flags())) { + throw new IllegalArgumentException( + String.format("Class '%s' which is used as %s in class '%s' must be a public class", classInfo, + member.phraseUsage(), member.declaringClass().name().toString())); + } + } + + private ClassInfo validateType(Type type) { + if (type.kind() != Type.Kind.PARAMETERIZED_TYPE) { + throw new IllegalArgumentException(ILLEGAL_ARGUMENT_MESSAGE); + } + ParameterizedType parameterizedType = (ParameterizedType) type; + if (!DotNames.LIST.equals(parameterizedType.name())) { + throw new IllegalArgumentException(ILLEGAL_ARGUMENT_MESSAGE); + } + if (parameterizedType.arguments().size() != 1) { + throw new IllegalArgumentException(ILLEGAL_ARGUMENT_MESSAGE); + } + ClassInfo classInfo = index.getClassByName(parameterizedType.arguments().get(0).name()); + if (classInfo == null) { + throw new IllegalArgumentException(ILLEGAL_ARGUMENT_MESSAGE); + } + return classInfo; + } + + private String forSignature(ClassInfo classInfo) { + return classInfo.name().toString().replace('.', '/'); + } + + /** + * An abstraction over Field and Method which we will use in order to keep the same code for Class and Interface cases + */ + static abstract class Member { + private final ClassInfo declaringClass; + private final Type type; + private final String name; + + protected abstract String phraseUsage(); + + public Member(ClassInfo declaringClass, Type type, String name) { + this.declaringClass = declaringClass; + this.type = type; + this.name = name; + } + + public ClassInfo declaringClass() { + return declaringClass; + } + + public Type type() { + return type; + } + + public String name() { + return name; + } + } + + static class FieldMember extends Member { + + public FieldMember(FieldInfo fieldInfo) { + super(fieldInfo.declaringClass(), fieldInfo.type(), fieldInfo.name()); + } + + @Override + protected String phraseUsage() { + return "field '" + name() + "'"; + } + } + + static class MethodReturnTypeMember extends Member { + + public MethodReturnTypeMember(MethodInfo methodInfo) { + super(methodInfo.declaringClass(), methodInfo.returnType(), methodInfo.name()); + } + + @Override + protected String phraseUsage() { + return "return type of method '" + name() + "'"; + } + } + + private static final String ILLEGAL_ARGUMENT_MESSAGE = "YamlListObjectHandler can only be used for fields / methods that are of type 'List' where 'SomeClass' is an application class"; +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanInfo.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanInfo.java new file mode 100644 index 0000000000000..85809f19c0bbd --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanInfo.java @@ -0,0 +1,140 @@ +package io.quarkus.arc.deployment.devconsole; + +import java.util.HashSet; +import java.util.Set; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.AnnotationTarget.Kind; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.FieldInfo; +import 
org.jboss.jandex.MethodInfo; +import org.jboss.jandex.Type; + +import io.quarkus.arc.deployment.CompletedApplicationClassPredicateBuildItem; +import io.quarkus.arc.processor.BeanInfo; +import io.quarkus.arc.processor.DotNames; + +public class DevBeanInfo implements Comparable { + + public static DevBeanInfo from(BeanInfo bean, CompletedApplicationClassPredicateBuildItem predicate) { + Set qualifiers = new HashSet<>(); + for (AnnotationInstance qualifier : bean.getQualifiers()) { + qualifiers.add(Name.from(qualifier)); + } + Set types = new HashSet<>(); + for (Type beanType : bean.getTypes()) { + types.add(Name.from(beanType)); + } + Name scope = Name.from(bean.getScope().getDotName()); + Name providerType = Name.from(bean.getProviderType()); + + if (bean.getTarget().isPresent()) { + AnnotationTarget target = bean.getTarget().get(); + DevBeanKind kind; + String memberName; + boolean isApplicationBean; + Name declaringClass; + if (target.kind() == Kind.METHOD) { + MethodInfo method = target.asMethod(); + memberName = method.name(); + kind = DevBeanKind.METHOD; + isApplicationBean = predicate.test(bean.getDeclaringBean().getBeanClass()); + declaringClass = Name.from(bean.getDeclaringBean().getBeanClass()); + } else if (target.kind() == Kind.FIELD) { + FieldInfo field = target.asField(); + memberName = field.name(); + kind = DevBeanKind.FIELD; + isApplicationBean = predicate.test(bean.getDeclaringBean().getBeanClass()); + declaringClass = Name.from(bean.getDeclaringBean().getBeanClass()); + } else if (target.kind() == Kind.CLASS) { + ClassInfo clazz = target.asClass(); + kind = DevBeanKind.CLASS; + memberName = null; + isApplicationBean = predicate.test(clazz.name()); + declaringClass = null; + } else { + throw new IllegalArgumentException("Invalid annotation target: " + target); + } + return new DevBeanInfo(kind, isApplicationBean, providerType, memberName, types, qualifiers, scope, declaringClass); + } else { + // Synthetic bean + return new DevBeanInfo(DevBeanKind.SYNTHETIC, false, providerType, null, types, qualifiers, scope, null); + } + } + + public DevBeanInfo(DevBeanKind kind, boolean isApplicationBean, Name providerType, String memberName, Set types, + Set qualifiers, Name scope, Name declaringClass) { + this.kind = kind; + this.isApplicationBean = isApplicationBean; + this.providerType = providerType; + this.memberName = memberName; + this.types = types; + this.qualifiers = qualifiers; + this.scope = scope; + this.declaringClass = declaringClass; + } + + private final DevBeanKind kind; + private final boolean isApplicationBean; + private final Name providerType; + private final String memberName; + private final Set types; + private final Set qualifiers; + private final Name scope; + private final Name declaringClass; + + public DevBeanKind getKind() { + return kind; + } + + public Name getScope() { + return scope; + } + + public Set getQualifiers() { + return qualifiers; + } + + public Set getNonDefaultQualifiers() { + Set nonDefault = new HashSet<>(); + String atDefault = DotNames.DEFAULT.toString(); + String atAny = DotNames.ANY.toString(); + for (Name qualifier : qualifiers) { + if (qualifier.toString().endsWith(atDefault) || qualifier.toString().endsWith(atAny)) { + continue; + } + nonDefault.add(qualifier); + } + return nonDefault; + } + + public Set getTypes() { + return types; + } + + public Name getProviderType() { + return providerType; + } + + public String getMemberName() { + return memberName; + } + + public boolean isApplicationBean() { + return isApplicationBean; + } + 
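+ // Note: these accessors are resolved by the Qute templates under dev-templates/
+ // (beans.html, removed-beans.html and the display-bean / bean-declaration tags),
+ // so renaming them would break the corresponding Dev UI pages.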
+ public Name getDeclaringClass() { + return declaringClass; + } + + @Override + public int compareTo(DevBeanInfo o) { + // Application beans should go first + if (isApplicationBean == o.isApplicationBean) { + return providerType.compareTo(o.providerType); + } + return isApplicationBean ? -1 : 1; + } +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanInfos.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanInfos.java new file mode 100644 index 0000000000000..e9e7b6200de8e --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanInfos.java @@ -0,0 +1,48 @@ +package io.quarkus.arc.deployment.devconsole; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class DevBeanInfos { + + private final List beans; + private final List removedBeans; + private final List observers; + + public DevBeanInfos() { + beans = new ArrayList<>(); + removedBeans = new ArrayList<>(); + observers = new ArrayList<>(); + } + + public List getRemovedBeans() { + return removedBeans; + } + + public List getBeans() { + return beans; + } + + public List getObservers() { + return observers; + } + + void addBean(DevBeanInfo beanInfo) { + beans.add(beanInfo); + } + + void addRemovedBean(DevBeanInfo beanInfo) { + removedBeans.add(beanInfo); + } + + void addObserver(DevObserverInfo observer) { + observers.add(observer); + } + + void sort() { + Collections.sort(beans); + Collections.sort(removedBeans); + Collections.sort(observers); + } +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanKind.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanKind.java new file mode 100644 index 0000000000000..e87ac408df65d --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevBeanKind.java @@ -0,0 +1,8 @@ +package io.quarkus.arc.deployment.devconsole; + +public enum DevBeanKind { + METHOD, + FIELD, + CLASS, + SYNTHETIC; +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevConsoleProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevConsoleProcessor.java new file mode 100644 index 0000000000000..f3235148f9b98 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevConsoleProcessor.java @@ -0,0 +1,118 @@ +package io.quarkus.arc.deployment.devconsole; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; + +import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.arc.deployment.AnnotationsTransformerBuildItem; +import io.quarkus.arc.deployment.ArcConfig; +import io.quarkus.arc.deployment.BeanDefiningAnnotationBuildItem; +import io.quarkus.arc.deployment.CompletedApplicationClassPredicateBuildItem; +import io.quarkus.arc.deployment.CustomScopeAnnotationsBuildItem; +import io.quarkus.arc.deployment.ValidationPhaseBuildItem; +import io.quarkus.arc.processor.AnnotationsTransformer; +import io.quarkus.arc.processor.BeanDeploymentValidator; +import io.quarkus.arc.processor.BeanInfo; +import io.quarkus.arc.processor.BuildExtension; +import io.quarkus.arc.processor.ObserverInfo; +import io.quarkus.arc.runtime.ArcContainerSupplier; +import io.quarkus.arc.runtime.ArcRecorder; +import 
io.quarkus.arc.runtime.BeanLookupSupplier; +import io.quarkus.arc.runtime.devconsole.EventsMonitor; +import io.quarkus.arc.runtime.devconsole.InvocationInterceptor; +import io.quarkus.arc.runtime.devconsole.InvocationTree; +import io.quarkus.arc.runtime.devconsole.InvocationsMonitor; +import io.quarkus.arc.runtime.devconsole.Monitored; +import io.quarkus.deployment.IsDevelopment; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.devconsole.spi.DevConsoleRuntimeTemplateInfoBuildItem; +import io.quarkus.devconsole.spi.DevConsoleTemplateInfoBuildItem; + +public class DevConsoleProcessor { + + @BuildStep(onlyIf = IsDevelopment.class) + @Record(ExecutionTime.STATIC_INIT) + public DevConsoleRuntimeTemplateInfoBuildItem exposeArcContainer(ArcRecorder recorder) { + return new DevConsoleRuntimeTemplateInfoBuildItem("arcContainer", + new ArcContainerSupplier()); + } + + @BuildStep(onlyIf = IsDevelopment.class) + void monitor(ArcConfig config, BuildProducer runtimeInfos, + BuildProducer beans, BuildProducer annotationTransformers, + CustomScopeAnnotationsBuildItem customScopes, + List beanDefiningAnnotations) { + if (!config.devMode.monitoringEnabled) { + return; + } + if (!config.transformUnproxyableClasses) { + throw new IllegalStateException( + "Dev UI problem: monitoring of CDI business method invocations not possible\n\t- quarkus.arc.transform-unproxyable-classes was set to false and therefore it would not be possible to apply interceptors to unproxyable bean classes\n\t- please disable the monitoring feature via quarkus.arc.dev-mode.monitoring-enabled=false or enable unproxyable classes transformation"); + } + // Events + runtimeInfos.produce( + new DevConsoleRuntimeTemplateInfoBuildItem("eventsMonitor", + new BeanLookupSupplier(EventsMonitor.class))); + beans.produce(AdditionalBeanBuildItem.unremovableOf(EventsMonitor.class)); + // Invocations + beans.produce(AdditionalBeanBuildItem.builder().setUnremovable() + .addBeanClasses(InvocationTree.class, InvocationsMonitor.class, InvocationInterceptor.class, + Monitored.class) + .build()); + Set skipNames = new HashSet<>(); + skipNames.add(DotName.createSimple(InvocationTree.class.getName())); + skipNames.add(DotName.createSimple(InvocationsMonitor.class.getName())); + skipNames.add(DotName.createSimple(EventsMonitor.class.getName())); + annotationTransformers.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { + @Override + public void transform(TransformationContext transformationContext) { + if (transformationContext.isClass()) { + ClassInfo beanClass = transformationContext.getTarget().asClass(); + if ((customScopes.isScopeDeclaredOn(beanClass) + || isAdditionalBeanDefiningAnnotationOn(beanClass, beanDefiningAnnotations)) + && !skipNames.contains(beanClass.name())) { + transformationContext.transform().add(Monitored.class).done(); + } + } + } + })); + runtimeInfos.produce(new DevConsoleRuntimeTemplateInfoBuildItem("invocationsMonitor", + new BeanLookupSupplier(InvocationsMonitor.class))); + } + + @BuildStep(onlyIf = IsDevelopment.class) + public DevConsoleTemplateInfoBuildItem collectBeanInfo(ValidationPhaseBuildItem validationPhaseBuildItem, + CompletedApplicationClassPredicateBuildItem predicate) { + BeanDeploymentValidator.ValidationContext validationContext = validationPhaseBuildItem.getContext(); + DevBeanInfos beanInfos = new 
DevBeanInfos(); + for (BeanInfo beanInfo : validationContext.beans()) { + beanInfos.addBean(DevBeanInfo.from(beanInfo, predicate)); + } + for (BeanInfo beanInfo : validationContext.removedBeans()) { + beanInfos.addRemovedBean(DevBeanInfo.from(beanInfo, predicate)); + } + for (ObserverInfo observerInfo : validationContext.get(BuildExtension.Key.OBSERVERS)) { + beanInfos.addObserver(DevObserverInfo.from(observerInfo, predicate)); + } + beanInfos.sort(); + return new DevConsoleTemplateInfoBuildItem("devBeanInfos", beanInfos); + } + + private boolean isAdditionalBeanDefiningAnnotationOn(ClassInfo beanClass, + List beanDefiningAnnotations) { + for (BeanDefiningAnnotationBuildItem beanDefiningAnnotation : beanDefiningAnnotations) { + if (beanClass.classAnnotation(beanDefiningAnnotation.getName()) != null) { + return true; + } + } + return false; + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevObserverInfo.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevObserverInfo.java new file mode 100644 index 0000000000000..d09374728e023 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/DevObserverInfo.java @@ -0,0 +1,120 @@ +package io.quarkus.arc.deployment.devconsole; + +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import javax.enterprise.event.Reception; +import javax.enterprise.event.TransactionPhase; + +import io.quarkus.arc.deployment.CompletedApplicationClassPredicateBuildItem; +import io.quarkus.arc.processor.ObserverInfo; + +public class DevObserverInfo implements Comparable { + + public static DevObserverInfo from(ObserverInfo observer, CompletedApplicationClassPredicateBuildItem predicate) { + List qualifiers; + if (observer.getQualifiers().isEmpty()) { + qualifiers = Collections.emptyList(); + } else { + qualifiers = observer.getQualifiers().stream().map(Name::from).collect(Collectors.toList()); + } + if (observer.getDeclaringBean() != null) { + return new DevObserverInfo(predicate.test(observer.getObserverMethod().declaringClass().name()), + Name.from(observer.getObserverMethod().declaringClass().name()), observer.getObserverMethod().name(), + Name.from(observer.getObservedType()), qualifiers, observer.getPriority(), observer.isAsync(), + observer.getReception(), observer.getTransactionPhase()); + } else { + return new DevObserverInfo(false, null, null, Name.from(observer.getObservedType()), qualifiers, + observer.getPriority(), observer.isAsync(), observer.getReception(), observer.getTransactionPhase()); + } + } + + private final boolean isApplicationObserver; + private final Name declaringClass; + private final String methodName; + private final Name observedType; + private List qualifiers; + private final int priority; + private final boolean isAsync; + private final Reception reception; + private final TransactionPhase transactionPhase; + + public DevObserverInfo(boolean isApplicationObserver, Name declaringClass, String methodName, Name observedType, + List qualifiers, int priority, boolean isAsync, Reception reception, TransactionPhase transactionPhase) { + this.isApplicationObserver = isApplicationObserver; + this.declaringClass = declaringClass; + this.methodName = methodName; + this.observedType = observedType; + this.qualifiers = qualifiers; + this.priority = priority; + this.isAsync = isAsync; + this.reception = reception; + this.transactionPhase = transactionPhase; + } + + public Name 
getDeclaringClass() { + return declaringClass; + } + + public String getMethodName() { + return methodName; + } + + public Name getObservedType() { + return observedType; + } + + public List getQualifiers() { + return qualifiers; + } + + public int getPriority() { + return priority; + } + + public boolean isAsync() { + return isAsync; + } + + public Reception getReception() { + return reception; + } + + public TransactionPhase getTransactionPhase() { + return transactionPhase; + } + + public boolean isApplicationObserver() { + return isApplicationObserver; + } + + @Override + public int compareTo(DevObserverInfo other) { + // Application observers should go first + int ret = 0; + if (isApplicationObserver != other.isApplicationObserver) { + ret = isApplicationObserver ? -1 : 1; + } else { + // Note that declaringClass and methodName are null for synthetic observers + if (declaringClass == null && other.declaringClass == null) { + // Synthetic observers + ret = observedType.compareTo(other.observedType); + } else { + // At this point we can be sure that at least one of the observers is not synthetic + if (declaringClass == null && other.declaringClass != null) { + ret = 1; + } else if (declaringClass != null && other.declaringClass == null) { + ret = -1; + } else { + ret = declaringClass.compareTo(other.declaringClass); + } + if (ret == 0) { + // Observers are not synthetic - method name must be present + ret = methodName.compareTo(other.methodName); + } + } + } + return ret; + } +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/Name.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/Name.java new file mode 100644 index 0000000000000..828c49f7ccb4e --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devconsole/Name.java @@ -0,0 +1,112 @@ +package io.quarkus.arc.deployment.devconsole; + +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationValue; +import org.jboss.jandex.DotName; +import org.jboss.jandex.ParameterizedType; +import org.jboss.jandex.Type; +import org.jboss.jandex.Type.Kind; + +public class Name implements Comparable { + + public static Name from(DotName dotName) { + return new Name(dotName.toString(), dotName.withoutPackagePrefix()); + } + + public static Name from(Type type) { + return new Name(type.toString(), createSimpleName(type)); + } + + public static Name from(AnnotationInstance annotation) { + return new Name(annotation.toString(false), createSimple(annotation)); + } + + private final String name; + private final String simpleName; + + private Name(String name, String simpleName) { + this.name = Objects.requireNonNull(name); + this.simpleName = simpleName; + } + + public Name(String name) { + this(name, null); + } + + public String getSimpleName() { + return simpleName != null ? 
simpleName : name; + } + + static String createSimpleName(Type type) { + switch (type.kind()) { + case CLASS: + return createSimple(type.name().toString()); + case PARAMETERIZED_TYPE: + return createSimple(type.asParameterizedType()); + case ARRAY: + Type component = type.asArrayType().component(); + if (component.kind() == Kind.CLASS) { + return createSimple(type.toString()); + } + default: + return null; + } + } + + static String createSimple(String name) { + int lastDot = name.lastIndexOf('.'); + if (lastDot != -1) { + return name.substring(lastDot + 1); + } + return name; + } + + static String createSimple(ParameterizedType parameterizedType) { + StringBuilder builder = new StringBuilder(); + builder.append(createSimple(parameterizedType.name().toString())); + List args = parameterizedType.arguments(); + if (!args.isEmpty()) { + builder.append('<'); + for (Iterator it = args.iterator(); it.hasNext();) { + builder.append(createSimpleName(it.next())); + if (it.hasNext()) { + builder.append(", "); + } + } + builder.append('>'); + } + return builder.toString(); + } + + static String createSimple(AnnotationInstance annotation) { + StringBuilder builder = new StringBuilder("@").append(createSimple(annotation.name().toString())); + List values = annotation.values(); + if (!values.isEmpty()) { + builder.append("("); + for (Iterator it = values.iterator(); it.hasNext();) { + builder.append(it.next()); + if (it.hasNext()) { + builder.append(","); + } + } + builder.append(')'); + } + + return builder.toString(); + } + + @Override + public int compareTo(Name other) { + return name.compareTo(other.name); + } + + @Override + public String toString() { + return name; + } + +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/staticmethods/InterceptedStaticMethodsProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/staticmethods/InterceptedStaticMethodsProcessor.java index f970b233e64e2..d04f45d0b6989 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/staticmethods/InterceptedStaticMethodsProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/staticmethods/InterceptedStaticMethodsProcessor.java @@ -170,12 +170,13 @@ void processInterceptedStaticMethods(BeanArchiveIndexBuildItem beanArchiveIndex, String initAllMethodName = "init_static_intercepted_methods"; for (Entry> entry : interceptedStaticMethodsMap.entrySet()) { - String packageName = DotNames.packageName(entry.getKey()); - String intializerName = packageName + "." 
+ entry.getKey().withoutPackagePrefix() + INITIALIZER_CLASS_SUFFIX; + String packageName = DotNames.internalPackageNameWithTrailingSlash(entry.getKey()); + String intializerName = packageName.replace("/", ".") + entry.getKey().withoutPackagePrefix() + + INITIALIZER_CLASS_SUFFIX; initializers.put(entry.getKey(), intializerName); ClassCreator initializer = ClassCreator.builder().classOutput(classOutput) - .className(intializerName.replace('.', '/')).setFinal(true).build(); + .className(intializerName).setFinal(true).build(); List initMethods = new ArrayList<>(); for (InterceptedStaticMethodBuildItem interceptedStaticMethod : entry.getValue()) { diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/beans.html b/extensions/arc/deployment/src/main/resources/dev-templates/beans.html new file mode 100644 index 0000000000000..e00309161893b --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/beans.html @@ -0,0 +1,61 @@ +{#include main} + {#style} + .annotation { + color: gray; + font-style: italic; + } + span.larger-badge { + font-size: 0.9em; + } + span.app-class { + cursor:pointer; + color:blue; + text-decoration:underline; + } + + {/style} + + {#script} + $(document).ready(function(){ + if (!ideKnown()) { + return; + } + $(".class-candidate").each(function() { + var className = $(this).text(); + if (appClassLang(className)) { + $(this).addClass("app-class"); + } + }); + + $(".app-class").on("click", function() { + openInIDE($(this).text()); + }); + }); + + {/script} + + {#title}Beans{/title} + {#body} + + + + + + + + + + {#for bean in info:devBeanInfos.beans} + + + + + {/for} + +
# Bean Kind
{count}. + {#display-bean bean/} + + {#bean-declaration bean/} +
+ {/body} +{/include} diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/embedded.html b/extensions/arc/deployment/src/main/resources/dev-templates/embedded.html new file mode 100644 index 0000000000000..dd2fe654bde0d --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/embedded.html @@ -0,0 +1,21 @@ + + + Beans {info:devBeanInfos.beans.size} +
+ + + Observers {info:arcContainer.observers.size} +
+{#if config:property('quarkus.arc.dev-mode.monitoring-enabled') is "true"} + + + Fired Events +
+ + + Invocation Trees +
+{/if} + + + Removed Beans {info:arcContainer.removedBeans.size} \ No newline at end of file diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/events.html b/extensions/arc/deployment/src/main/resources/dev-templates/events.html new file mode 100644 index 0000000000000..e7d62ddc3fda1 --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/events.html @@ -0,0 +1,63 @@ +{#include main fluid=true} + {#style} + .nav-item { + padding: .5rem .3rem; + } + {/style} + {#title}Fired Events{/title} + {#body} +

+ + + + + + + + + + {#for event in info:eventsMonitor.lastEvents.orEmpty} + + + + + + {/for} + +
Timestamp Event Type Qualifiers
+ {event.timestamp} + + {event.type} + + {#when event.qualifiers.size} + {#is 1} + {event.qualifiers.iterator.next} + {#is > 1} +
    + {#for q in event.qualifiers} +
  • {q}
  • + {/for} +
+ {/when} +
+ {/body} +{/include} diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/invocations.html b/extensions/arc/deployment/src/main/resources/dev-templates/invocations.html new file mode 100644 index 0000000000000..320bf44699109 --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/invocations.html @@ -0,0 +1,91 @@ +{#include main fluid=true} + {#style} + .nav-item { + padding: .5rem .3rem; + } + ul#tree, ul.nested { + list-style-type: none; + } + #tree { + margin: 0; + padding: 0; + } + span.caret { + cursor: pointer; + user-select: none; + } + span.caret::before { + content: "\229E"; + color: black; + display: inline-block; + margin-right: 6px; + } + span.caret-down::before { + content: "\229F" + /*transform: rotate(90deg);*/ + } + ul.nested { + display: none; + } + ul.active { + display: block; + } + ul code { + color: #343a40; + } + ul li { + margin-top: 5px; + } + span.declaring-class { + color: gray; + } + {/style} + {#script} + var carets = document.getElementsByClassName("caret"); + for (i = 0; i < carets.length; i++) { + carets[i].addEventListener("click", function() { + this.parentElement.querySelector(".nested").classList.toggle("active"); + this.classList.toggle("caret-down"); + }); + } + {/script} + {#title}Invocation Trees{/title} + {#body} + + + + + + + + + + {#each info:invocationsMonitor.lastInvocations.orEmpty} + + + + + {/each} + +
Start Invocations
{it.startFormatted}
    {#tree it root=true /}
+ {/body} +{/include} diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/observers.html b/extensions/arc/deployment/src/main/resources/dev-templates/observers.html new file mode 100644 index 0000000000000..b378d9a7f56bf --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/observers.html @@ -0,0 +1,59 @@ +{#include main fluid=true} + {#style} + .annotation { + color: gray; + font-style: italic; + } + span.larger-badge { + font-size: 0.9em; + } + {/style} + {#title}Observers{/title} + {#body} + + + + + + + + + + + + + + {#for observer in info:devBeanInfos.observers} + + + + + + + + + + {/for} + +
# Source Observed Type/Qualifiers Priority Reception Transaction Phase Async
{count}. + {#if observer.declaringClass} + {observer.declaringClass}#{observer.methodName}() + {#else} + Synthetic + {/if} + + {#each observer.qualifiers} + {it.simpleName} + {/each} + {observer.observedType} + + {observer.priority} + + {observer.reception} + + {observer.transactionPhase} + + {observer.async} +
+ {/body} +{/include} diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/removed-beans.html b/extensions/arc/deployment/src/main/resources/dev-templates/removed-beans.html new file mode 100644 index 0000000000000..9cae8bf25ac30 --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/removed-beans.html @@ -0,0 +1,35 @@ +{#include main} + {#style} + .annotation { + color: gray; + font-style: italic; + } + span.larger-badge { + font-size: 0.9em; + } + {/style} + {#title}Removed Beans{/title} + {#body} + + + + + + + + + + {#for bean in info:devBeanInfos.removedBeans} + + + + + {/for} + +
# Bean Kind
{count}. + {#display-bean bean/} + + {#bean-declaration bean/} +
+ {/body} +{/include} diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/tags/bean-declaration.html b/extensions/arc/deployment/src/main/resources/dev-templates/tags/bean-declaration.html new file mode 100644 index 0000000000000..473affc152f1b --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/tags/bean-declaration.html @@ -0,0 +1,10 @@ +{#switch it.kind} + {#case METHOD} + Producer method
{it.declaringClass.simpleName}.{it.memberName}() + {#case FIELD} + Producer field
{it.declaringClass.simpleName}.{it.memberName} + {#case CLASS} + Class + {#else} + Synthetic +{/switch} \ No newline at end of file diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/tags/display-bean.html b/extensions/arc/deployment/src/main/resources/dev-templates/tags/display-bean.html new file mode 100644 index 0000000000000..a29c121a90abb --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/tags/display-bean.html @@ -0,0 +1,5 @@ +@{it.scope.simpleName}
+{#for q in it.nonDefaultQualifiers} +{q.simpleName}
+{/for} +{it.providerType} diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/tags/duration.html b/extensions/arc/deployment/src/main/resources/dev-templates/tags/duration.html new file mode 100644 index 0000000000000..adb73e72277ab --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/tags/duration.html @@ -0,0 +1,3 @@ + + {#if it.durationMillis > 0}{it.durationMillis} ms{#else}< 1 ms{/if} + \ No newline at end of file diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/tags/kind.html b/extensions/arc/deployment/src/main/resources/dev-templates/tags/kind.html new file mode 100644 index 0000000000000..078a39546f5be --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/tags/kind.html @@ -0,0 +1,8 @@ +{#when it.kind} + {#is PRODUCER} +Producer + {#is DISPOSER} +Disposer + {#is OBSERVER} +Observer +{/when} \ No newline at end of file diff --git a/extensions/arc/deployment/src/main/resources/dev-templates/tags/tree.html b/extensions/arc/deployment/src/main/resources/dev-templates/tags/tree.html new file mode 100644 index 0000000000000..211f0446aaa4f --- /dev/null +++ b/extensions/arc/deployment/src/main/resources/dev-templates/tags/tree.html @@ -0,0 +1,15 @@ +
  • +{#if !root} +{#if next}├─{#else}└─{/if} +{/if} +{#if it.children} +{it.declaringClassName}#{it.method.name}() {#duration it /}{#kind it /}{#if it.message != null} Error{/if} +
      +{#each it.children} +{#tree it root=false next=hasNext /} +{/each} +
    +{#else} +{it.declaringClassName}#{it.method.name}() {#duration it /}{#kind it /}{#if it.message != null} Error{/if} +{/if} +
  • \ No newline at end of file diff --git a/extensions/arc/deployment/src/test/java/B.java b/extensions/arc/deployment/src/test/java/B.java new file mode 100644 index 0000000000000..5194ebda21cdd --- /dev/null +++ b/extensions/arc/deployment/src/test/java/B.java @@ -0,0 +1,21 @@ +import java.util.Optional; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.context.control.ActivateRequestContext; +import javax.inject.Inject; + +import org.eclipse.microprofile.config.inject.ConfigProperty; + +@ApplicationScoped +public class B { + + @Inject + @ConfigProperty(name = "simpleBean.baz") + Optional bazOptional; + + @ActivateRequestContext + String ping() { + return bazOptional.orElse("bang!"); + } + +} diff --git a/extensions/arc/deployment/src/test/java/C.java b/extensions/arc/deployment/src/test/java/C.java new file mode 100644 index 0000000000000..aa51f51320b50 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/C.java @@ -0,0 +1,17 @@ +import javax.inject.Inject; + +@javax.inject.Singleton +public class C { + + private B b; + + @Inject + void setB(B b) { + this.b = b; + } + + String ping() { + return b.ping(); + } + +} diff --git a/extensions/arc/deployment/src/test/java/L.java b/extensions/arc/deployment/src/test/java/L.java new file mode 100644 index 0000000000000..f385d890d5113 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/L.java @@ -0,0 +1,14 @@ +@javax.inject.Singleton +public class L { + + private final C c; + + L(C c) { + this.c = c; + } + + String ping() { + return c.ping(); + } + +} diff --git a/extensions/arc/deployment/src/test/java/SingleLetterDefaultPackageTest.java b/extensions/arc/deployment/src/test/java/SingleLetterDefaultPackageTest.java new file mode 100644 index 0000000000000..a1bb0783b04b7 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/SingleLetterDefaultPackageTest.java @@ -0,0 +1,37 @@ +import static org.junit.jupiter.api.Assertions.assertEquals; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class SingleLetterDefaultPackageTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(B.class, C.class, L.class) + .addAsResource(new StringAsset("simpleBean.baz=1"), "application.properties")); + + @Inject + B b; + + @Inject + C c; + + @Inject + L l; + + @Test + public void testB() { + assertEquals("1", b.ping()); + assertEquals(c.ping(), b.ping()); + assertEquals(l.ping(), b.ping()); + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/autoproduces/AutoProducerMethodTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/autoproduces/AutoProducerMethodTest.java index 56bec4d7ea3b4..c4cc4fa6ce2c3 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/autoproduces/AutoProducerMethodTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/autoproduces/AutoProducerMethodTest.java @@ -79,6 +79,11 @@ List strings() { return Collections.emptyList(); } + // void methods should be ignored + @MyQualifier + void ignored() { + } + } @Qualifier diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigDefaultValuesTest.java 
b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigDefaultValuesTest.java new file mode 100644 index 0000000000000..5ff2078ff58d5 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigDefaultValuesTest.java @@ -0,0 +1,66 @@ +package io.quarkus.arc.test.config; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import javax.inject.Inject; + +import org.eclipse.microprofile.config.Config; +import org.eclipse.microprofile.config.spi.ConfigSource; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class ConfigDefaultValuesTest { + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource(new StringAsset( + "config_ordinal=1000\n" + + "my.prop=1234\n" + + "%prod.my.prop=1234\n" + + "%dev.my.prop=5678\n" + + "%test.my.prop=1234"), + "application.properties")); + @Inject + Config config; + + @Test + void configDefaultValues() { + ConfigSource defaultValues = getConfigSourceByName("PropertiesConfigSource[source=Specified default values]"); + assertNotNull(defaultValues); + assertEquals(Integer.MIN_VALUE + 100, defaultValues.getOrdinal()); + + // Should be the first + ConfigSource applicationProperties = config.getConfigSources().iterator().next(); + assertNotNull(applicationProperties); + assertEquals(1000, applicationProperties.getOrdinal()); + + assertEquals("1234", defaultValues.getValue("my.prop")); + assertEquals("1234", applicationProperties.getValue("my.prop")); + } + + @Test + void profileDefaultValues() { + ConfigSource defaultValues = getConfigSourceByName("PropertiesConfigSource[source=Specified default values]"); + assertNotNull(defaultValues); + assertEquals("1234", defaultValues.getValue("my.prop")); + assertEquals("1234", defaultValues.getValue("%prod.my.prop")); + assertEquals("5678", defaultValues.getValue("%dev.my.prop")); + assertEquals("1234", defaultValues.getValue("%test.my.prop")); + assertEquals("1234", config.getValue("my.prop", String.class)); + } + + private ConfigSource getConfigSourceByName(String name) { + for (ConfigSource configSource : config.getConfigSources()) { + if (configSource.getName().contains(name)) { + return configSource; + } + } + return null; + } +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigMappingTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigMappingTest.java new file mode 100644 index 0000000000000..8620e9ebbc4e5 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigMappingTest.java @@ -0,0 +1,293 @@ +package io.quarkus.arc.test.config; + +import static java.util.stream.Collectors.toList; +import static java.util.stream.StreamSupport.stream; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.stream.Stream; + +import javax.inject.Inject; 
+ +import org.eclipse.microprofile.config.Config; +import org.eclipse.microprofile.config.spi.Converter; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.ConfigValue; +import io.smallrye.config.SmallRyeConfig; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; +import io.smallrye.config.WithParentName; + +public class ConfigMappingTest { + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource(new StringAsset("config.my.prop=1234\n" + + "group.host=localhost\n" + + "group.port=8080\n" + + "types.int=9\n" + + "types.long=9999999999\n" + + "types.float=99.9\n" + + "types.double=99.99\n" + + "types.char=c\n" + + "types.boolean=true\n" + + "types.value=1234\n" + + "optionals.server.host=localhost\n" + + "optionals.server.port=8080\n" + + "optionals.optional=optional\n" + + "optionals.optional.int=9\n" + + "collections.strings=foo,bar\n" + + "collections.ints=1,2,3\n" + + "maps.server.host=localhost\n" + + "maps.server.port=8080\n" + + "maps.group.server.host=localhost\n" + + "maps.group.server.port=8080\n" + + "converters.foo=notbar\n" + + "override.server.host=localhost\n" + + "override.server.port=8080\n" + + "cloud.server.host=cloud\n" + + "cloud.server.port=9000\n"), "application.properties")); + @Inject + Config config; + + @ConfigMapping(prefix = "config") + public interface MyConfigMapping { + @WithName("my.prop") + String myProp(); + } + + @Inject + MyConfigMapping myConfigMapping; + + @Test + void configMapping() { + SmallRyeConfig smallRyeConfig = ((SmallRyeConfig) config); + MyConfigMapping configMapping = smallRyeConfig.getConfigMapping(MyConfigMapping.class); + assertNotNull(configMapping); + assertEquals("1234", configMapping.myProp()); + + assertNotNull(myConfigMapping); + assertEquals("1234", myConfigMapping.myProp()); + } + + @ConfigMapping(prefix = "group") + public interface GroupMapping { + @WithParentName + ServerHost host(); + + @WithParentName + ServerPort port(); + + interface ServerHost { + String host(); + } + + interface ServerPort { + int port(); + } + } + + @Inject + GroupMapping groupMapping; + + @Test + void groups() { + assertNotNull(groupMapping); + assertEquals("localhost", groupMapping.host().host()); + assertEquals(8080, groupMapping.port().port()); + } + + @ConfigMapping(prefix = "types") + public interface SomeTypes { + @WithName("int") + int intPrimitive(); + + @WithName("int") + Integer intWrapper(); + + @WithName("long") + long longPrimitive(); + + @WithName("long") + Long longWrapper(); + + @WithName("float") + float floatPrimitive(); + + @WithName("float") + Float floatWrapper(); + + @WithName("double") + double doublePrimitive(); + + @WithName("double") + Double doubleWrapper(); + + @WithName("char") + char charPrimitive(); + + @WithName("char") + Character charWrapper(); + + @WithName("boolean") + boolean booleanPrimitive(); + + @WithName("boolean") + Boolean booleanWrapper(); + + @WithName("value") + ConfigValue configValue(); + } + + @Inject + SomeTypes types; + + @Test + void types() { + assertNotNull(types); + assertEquals(9, types.intPrimitive()); + assertEquals(9, 
types.intWrapper()); + assertEquals(9999999999L, types.longPrimitive()); + assertEquals(9999999999L, types.longWrapper()); + assertEquals(99.9f, types.floatPrimitive()); + assertEquals(99.9f, types.floatWrapper()); + assertEquals(99.99, types.doublePrimitive()); + assertEquals(99.99, types.doubleWrapper()); + assertEquals('c', types.charPrimitive()); + assertEquals('c', types.charWrapper()); + assertTrue(types.booleanPrimitive()); + assertTrue(types.booleanWrapper()); + assertEquals("1234", types.configValue().getValue()); + } + + @ConfigMapping(prefix = "optionals") + public interface Optionals { + Optional server(); + + Optional optional(); + + @WithName("optional.int") + OptionalInt optionalInt(); + + interface Server { + String host(); + + int port(); + } + } + + @Inject + Optionals optionals; + + @Test + void optionals() { + assertTrue(optionals.server().isPresent()); + assertEquals("localhost", optionals.server().get().host()); + assertEquals(8080, optionals.server().get().port()); + + assertTrue(optionals.optional().isPresent()); + assertEquals("optional", optionals.optional().get()); + assertTrue(optionals.optionalInt().isPresent()); + assertEquals(9, optionals.optionalInt().getAsInt()); + } + + @ConfigMapping(prefix = "collections") + public interface Collections { + @WithName("strings") + List listStrings(); + + @WithName("ints") + List listInts(); + } + + @Inject + Collections collections; + + @Test + void collections() { + assertEquals(Stream.of("foo", "bar").collect(toList()), collections.listStrings()); + assertEquals(Stream.of(1, 2, 3).collect(toList()), collections.listInts()); + } + + @ConfigMapping(prefix = "maps") + public interface Maps { + Map server(); + + Map group(); + + interface Server { + String host(); + + int port(); + } + } + + @Inject + Maps maps; + + @Test + void maps() { + assertEquals("localhost", maps.server().get("host")); + assertEquals(8080, Integer.valueOf(maps.server().get("port"))); + + assertEquals("localhost", maps.group().get("server").host()); + assertEquals(8080, maps.group().get("server").port()); + } + + @ConfigMapping(prefix = "defaults") + public interface Defaults { + @WithDefault("foo") + String foo(); + + @WithDefault("bar") + String bar(); + } + + @Inject + Defaults defaults; + + @Test + void defaults() { + assertEquals("foo", defaults.foo()); + assertEquals("bar", defaults.bar()); + assertEquals("foo", config.getValue("defaults.foo", String.class)); + + final List propertyNames = stream(config.getPropertyNames().spliterator(), false).collect(toList()); + assertFalse(propertyNames.contains("defaults.foo")); + } + + @ConfigMapping(prefix = "converters") + public interface Converters { + @WithConverter(FooBarConverter.class) + String foo(); + + class FooBarConverter implements Converter { + @Override + public String convert(final String value) { + return "bar"; + } + } + } + + @Inject + Converters converters; + + @Test + void converters() { + assertEquals("bar", converters.foo()); + } +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigOptionalsTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigOptionalsTest.java new file mode 100644 index 0000000000000..183fb4014dd9a --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigOptionalsTest.java @@ -0,0 +1,114 @@ +package io.quarkus.arc.test.config; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static 
org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Optional; +import java.util.OptionalDouble; +import java.util.OptionalInt; +import java.util.OptionalLong; + +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class ConfigOptionalsTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(UsingOptionals.class) + .addAsResource(new StringAsset("foo=42\nbar=4.2"), "application.properties")); + + @Inject + UsingOptionals usingOptionals; + + @Test + public void testOptionals() { + assertTrue(usingOptionals.optionalOfInteger.isPresent()); + assertEquals(42, usingOptionals.optionalOfInteger.get()); + + assertTrue(usingOptionals.optionalOfLong.isPresent()); + assertEquals(42, usingOptionals.optionalOfLong.get()); + + assertTrue(usingOptionals.optionalOfDouble.isPresent()); + assertEquals(4.2, usingOptionals.optionalOfDouble.get()); + + assertTrue(usingOptionals.optionalInt.isPresent()); + assertEquals(42, usingOptionals.optionalInt.getAsInt()); + + assertTrue(usingOptionals.optionalLong.isPresent()); + assertEquals(42, usingOptionals.optionalLong.getAsLong()); + + assertTrue(usingOptionals.optionalDouble.isPresent()); + assertEquals(4.2, usingOptionals.optionalDouble.getAsDouble()); + + assertFalse(usingOptionals.missingOptionalOfInteger.isPresent()); + assertFalse(usingOptionals.missingOptionalOfLong.isPresent()); + assertFalse(usingOptionals.missingOptionalOfDouble.isPresent()); + assertFalse(usingOptionals.missingOptionalInt.isPresent()); + assertFalse(usingOptionals.missingOptionalLong.isPresent()); + assertFalse(usingOptionals.missingOptionalDouble.isPresent()); + } + + @Singleton + static class UsingOptionals { + @Inject + @ConfigProperty(name = "foo") + Optional optionalOfInteger; + + @Inject + @ConfigProperty(name = "foo") + Optional optionalOfLong; + + @Inject + @ConfigProperty(name = "bar") + Optional optionalOfDouble; + + @Inject + @ConfigProperty(name = "foo") + OptionalInt optionalInt; + + @Inject + @ConfigProperty(name = "foo") + OptionalLong optionalLong; + + @Inject + @ConfigProperty(name = "bar") + OptionalDouble optionalDouble; + + @Inject + @ConfigProperty(name = "missing") + Optional missingOptionalOfInteger; + + @Inject + @ConfigProperty(name = "missing") + Optional missingOptionalOfLong; + + @Inject + @ConfigProperty(name = "missing") + Optional missingOptionalOfDouble; + + @Inject + @ConfigProperty(name = "missing") + OptionalInt missingOptionalInt; + + @Inject + @ConfigProperty(name = "missing") + OptionalLong missingOptionalLong; + + @Inject + @ConfigProperty(name = "missing") + OptionalDouble missingOptionalDouble; + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/ClassWithAllPublicFieldsConfigPropertiesTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/ClassWithAllPublicFieldsConfigPropertiesTest.java index 9a81c7c799821..433d8a2fff1f7 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/ClassWithAllPublicFieldsConfigPropertiesTest.java +++ 
b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/ClassWithAllPublicFieldsConfigPropertiesTest.java @@ -11,6 +11,7 @@ import javax.inject.Inject; import javax.inject.Singleton; +import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.StringAsset; import org.jboss.shrinkwrap.api.spec.JavaArchive; @@ -27,7 +28,7 @@ public class ClassWithAllPublicFieldsConfigPropertiesTest { .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(DummyBean.class, DummyProperties.class) .addAsResource(new StringAsset( - "dummy.name=quarkus\ndummy.numbers=1,2,3,4\ndummy.bool-with-default=true\ndummy.optional-int=100\ndummy.optional-int-list=1,2"), + "dummy.name=quarkus\ndummy.numbers=1,2,3,4\ndummy.bool-with-default=true\ndummy.optional-int=100\ndummy.opt-int-lst=1,2"), "application.properties")); @Inject @@ -39,6 +40,7 @@ public void testConfiguredValues() { assertEquals(Arrays.asList(1, 2, 3, 4), dummyBean.getNumbers()); assertEquals("default", dummyBean.getUnset()); assertTrue(dummyBean.isBoolWithDefault()); + assertFalse(dummyBean.isUnsetBoolean()); assertTrue(dummyBean.getOptionalInt().isPresent()); assertEquals(100, dummyBean.getOptionalInt().get()); assertFalse(dummyBean.getOptionalString().isPresent()); @@ -68,6 +70,10 @@ boolean isBoolWithDefault() { return dummyProperties.boolWithDefault; } + boolean isUnsetBoolean() { + return dummyProperties.unsetBoolean; + } + Optional getOptionalInt() { return dummyProperties.optionalInt; } @@ -91,10 +97,13 @@ public static class DummyProperties { public String name; public String unset = "default"; public boolean boolWithDefault = false; + public boolean unsetBoolean; public List numbers; public Optional optionalInt; public Optional optionalString; + @ConfigProperty(name = "opt-str-lst") public Optional> optionalStringList; + @ConfigProperty(name = "opt-int-lst") public Optional> optionalIntList; } } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/InterfaceWithNestedInterfaceConfigPrefixTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/InterfaceWithNestedInterfaceConfigPrefixTest.java index 10ef058906694..2f59a91048431 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/InterfaceWithNestedInterfaceConfigPrefixTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/InterfaceWithNestedInterfaceConfigPrefixTest.java @@ -27,7 +27,7 @@ public class InterfaceWithNestedInterfaceConfigPrefixTest { .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(DummyBean.class, DummyProperties.class, DummyProperties.NestedDummy.class) .addAsResource(new StringAsset( - "dummy.name=quarkus\ndummy.boolWD=true\ndummy.nested-dummy.foo=bar\ndummy.nested-dummy.other=some"), + "dummy.name=quarkus\ndummy.boolWD=true\ndummy.nested-dummy.foo=bar\ndummy.nested-dummy.other=some\ndummy.other-nested.foo=bar2\ndummy.other-nested.other=some2"), "application.properties")); @Inject @@ -37,11 +37,18 @@ public class InterfaceWithNestedInterfaceConfigPrefixTest { public void testConfiguredValues() { assertEquals("quarkus", dummyBean.dummyProperties.getName()); assertTrue(dummyBean.dummyProperties.isBoolWithDef()); + DummyProperties.NestedDummy nestedDummy = dummyBean.dummyProperties.getNestedDummy(); assertNotNull(nestedDummy); assertEquals("some", nestedDummy.getOther()); assertEquals("bar", 
nestedDummy.getFooBar()); assertEquals(Arrays.asList(1, 2, 3), nestedDummy.getNumbers()); + + DummyProperties.NestedDummy otherNested = dummyBean.dummyProperties.getOtherNested(); + assertNotNull(otherNested); + assertEquals("some2", otherNested.getOther()); + assertEquals("bar2", otherNested.getFooBar()); + assertEquals(Arrays.asList(1, 2, 3), otherNested.getNumbers()); } @Singleton @@ -61,6 +68,8 @@ public interface DummyProperties { NestedDummy getNestedDummy(); + NestedDummy getOtherNested(); + interface NestedDummy { @ConfigProperty(defaultValue = "1,2,3") diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/MultipleInterfaceConfigPrefixTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/MultipleInterfaceConfigPrefixTest.java index 42e904e5a4b82..736992ba740c8 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/MultipleInterfaceConfigPrefixTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/MultipleInterfaceConfigPrefixTest.java @@ -88,5 +88,9 @@ public interface DummyProperties { default String nameWithSuffix() { return getFirstName() + "!"; } + + static void someStatic() { + + } } } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassAndSuperclassConfigPropertiesTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassAndSuperclassConfigPropertiesTest.java index 3bb92c52c6b99..8b493eea6e782 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassAndSuperclassConfigPropertiesTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassAndSuperclassConfigPropertiesTest.java @@ -9,6 +9,7 @@ import javax.inject.Inject; import javax.inject.Singleton; +import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.StringAsset; import org.jboss.shrinkwrap.api.spec.JavaArchive; @@ -26,7 +27,7 @@ public class NestedClassAndSuperclassConfigPropertiesTest { .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(DummyBean.class, DummyProperties.class, DummyProperties.NestedDummyProperties.class) .addAsResource(new StringAsset( - "dummy.lastname=redhat\ndummy.name=quarkus\ndummy.nested.ages=1,2,3,4\ndummy.supernested.heights=100,200\ndummy.unused=whatever\ndummy.nested.unused=whatever2"), + "dummy.lname=redhat\ndummy.name=quarkus\ndummy.nested.ages=1,2,3,4\ndummy.supernested.afraid-of-heights=100,200\ndummy.unused=whatever\ndummy.nested.unused=whatever2"), "application.properties")); @Inject @@ -78,6 +79,7 @@ public static class NestedDummyProperties { } public static class SuperDummyProperties { + @ConfigProperty(name = "lname") private String lastname; private NestedSuperDummyProperties supernested; @@ -99,6 +101,7 @@ public void setSupernested(NestedSuperDummyProperties supernested) { public static class NestedSuperDummyProperties { + @ConfigProperty(name = "afraid-of-heights") public Set heights; @ConfigIgnore public Integer ignored; diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassConfigPropertiesTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassConfigPropertiesTest.java index 0211647d0622d..7738d4ef3f6cf 100644 --- 
a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassConfigPropertiesTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/configproperties/NestedClassConfigPropertiesTest.java @@ -8,6 +8,7 @@ import javax.inject.Inject; import javax.inject.Singleton; +import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.StringAsset; import org.jboss.shrinkwrap.api.spec.JavaArchive; @@ -24,7 +25,7 @@ public class NestedClassConfigPropertiesTest { .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(DummyBean.class, DummyProperties.class, DummyProperties.NestedDummyProperties.class) .addAsResource(new StringAsset( - "dummy.name=quarkus\ndummy.nested.ages=1,2,3,4\ndummy.unused=whatever\ndummy.nested.unused=whatever2"), + "dummy.name=quarkus\ndummy.nested.rock-of-ages=1,2,3,4\ndummy.unused=whatever\ndummy.nested.unused=whatever2"), "application.properties")); @Inject @@ -70,6 +71,7 @@ public void setName(String name) { public static class NestedDummyProperties { + @ConfigProperty(name = "rock-of-ages") public Set ages; } } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/devconsole/DevObserverInfoTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/devconsole/DevObserverInfoTest.java new file mode 100644 index 0000000000000..ca364db576e2f --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/devconsole/DevObserverInfoTest.java @@ -0,0 +1,49 @@ +package io.quarkus.arc.test.devconsole; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import javax.enterprise.event.Reception; +import javax.enterprise.event.TransactionPhase; + +import org.junit.jupiter.api.Test; + +import io.quarkus.arc.deployment.devconsole.DevObserverInfo; +import io.quarkus.arc.deployment.devconsole.Name; + +public class DevObserverInfoTest { + + @Test + public void testCompare() { + List observers = new ArrayList<>(); + // Synthetic non-app - should be last + observers.add(new DevObserverInfo(false, null, null, new Name("Delta"), + Collections.emptyList(), 0, false, Reception.ALWAYS, TransactionPhase.IN_PROGRESS)); + // App observers + observers.add(new DevObserverInfo(true, new Name("Alpha"), "fooish", new Name("java.lang.String"), + Collections.emptyList(), 0, false, Reception.ALWAYS, TransactionPhase.IN_PROGRESS)); + observers.add(new DevObserverInfo(true, new Name("Alpha"), "blabla", new Name("java.lang.String"), + Collections.emptyList(), 1, false, Reception.ALWAYS, TransactionPhase.IN_PROGRESS)); + observers.add(new DevObserverInfo(true, null, null, new Name("Charlie"), + Collections.emptyList(), 0, false, Reception.ALWAYS, TransactionPhase.IN_PROGRESS)); + observers.add(new DevObserverInfo(true, new Name("Bravo"), "hop", new Name("java.lang.String"), + Collections.emptyList(), 0, false, Reception.IF_EXISTS, TransactionPhase.IN_PROGRESS)); + + Collections.sort(observers); + assertEquals("blabla", observers.get(0).getMethodName()); + assertEquals("Alpha", observers.get(0).getDeclaringClass().toString()); + assertEquals("fooish", observers.get(1).getMethodName()); + assertEquals("Alpha", observers.get(1).getDeclaringClass().toString()); + assertEquals("hop", observers.get(2).getMethodName()); + assertEquals("Bravo", 
observers.get(2).getDeclaringClass().toString()); + assertNull(observers.get(3).getMethodName()); + assertEquals("Charlie", observers.get(3).getObservedType().toString()); + assertEquals("Delta", observers.get(observers.size() - 1).getObservedType().toString()); + assertNull(observers.get(observers.size() - 1).getDeclaringClass()); + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/devconsole/NameTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/devconsole/NameTest.java new file mode 100644 index 0000000000000..dba9292e95acc --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/devconsole/NameTest.java @@ -0,0 +1,67 @@ +package io.quarkus.arc.test.devconsole; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationValue; +import org.jboss.jandex.ArrayType; +import org.jboss.jandex.DotName; +import org.jboss.jandex.ParameterizedType; +import org.jboss.jandex.PrimitiveType; +import org.jboss.jandex.Type; +import org.jboss.jandex.Type.Kind; +import org.junit.jupiter.api.Test; + +import io.quarkus.arc.deployment.devconsole.Name; + +public class NameTest { + + @Test + public void testFromDotName() { + assertName(Name.from(DotName.createSimple("org.acme.Foo")), "org.acme.Foo", "Foo"); + assertName(Name.from(DotName.createSimple("org.acme.Foo$Lu")), "org.acme.Foo$Lu", "Foo$Lu"); + assertName(Name.from(PrimitiveType.BYTE.name()), "byte", "byte"); + } + + @Test + public void testFromType() { + assertName(Name.from(Type.create(DotName.createSimple("org.acme.Foo"), Kind.CLASS)), "org.acme.Foo", "Foo"); + assertName(Name.from(PrimitiveType.BOOLEAN), "boolean", "boolean"); + assertName(Name.from(ArrayType.create(PrimitiveType.LONG, 1)), "long[]", "long[]"); + assertName(Name.from(ArrayType.create(Type.create(DotName.createSimple("org.acme.Foo"), Kind.CLASS), 1)), + "org.acme.Foo[]", "Foo[]"); + assertName( + Name.from(ParameterizedType.create(DotName.createSimple("org.acme.Foo"), + new Type[] { Type.create(DotName.createSimple("java.lang.String"), Kind.CLASS) }, null)), + "org.acme.Foo", "Foo"); + assertName( + Name.from(ParameterizedType.create(DotName.createSimple("org.acme.Foo"), + new Type[] { ParameterizedType.create(DotName.createSimple("java.util.List"), + new Type[] { Type.create(DotName.createSimple("java.lang.String"), Kind.CLASS) }, null) }, + null)), + "org.acme.Foo>", "Foo>"); + } + + @Test + public void testFromAnnotation() { + assertName( + Name.from(AnnotationInstance.create(DotName.createSimple("org.acme.Bar"), null, + new AnnotationValue[] {})), + "@org.acme.Bar", "@Bar"); + assertName( + Name.from(AnnotationInstance.create(DotName.createSimple("org.acme.Bar"), null, + new AnnotationValue[] { AnnotationValue.createBooleanValue("checked", false) })), + "@org.acme.Bar(checked = false)", "@Bar(checked = false)"); + assertName( + Name.from(AnnotationInstance.create(DotName.createSimple("org.acme.Bar"), null, + new AnnotationValue[] { AnnotationValue.createClassValue("impl", + Type.create(DotName.createSimple("org.acme.Baz"), Kind.CLASS)) })), + "@org.acme.Bar(impl = org.acme.Baz)", "@Bar(impl = org.acme.Baz)"); + } + + private void assertName(Name name, String expectedName, String expectedSimpleName) { + assertEquals(expectedName, name.toString()); + assertEquals(expectedSimpleName, name.getSimpleName()); + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/exclude/ExcludeTypesTest.java 
b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/exclude/ExcludeTypesTest.java index 7679fdad1399c..c4908cf3ba7c7 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/exclude/ExcludeTypesTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/exclude/ExcludeTypesTest.java @@ -1,11 +1,21 @@ package io.quarkus.arc.test.exclude; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.TYPE; +import static java.lang.annotation.RetentionPolicy.RUNTIME; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.Instance; import javax.inject.Inject; +import javax.interceptor.AroundInvoke; +import javax.interceptor.Interceptor; +import javax.interceptor.InterceptorBinding; +import javax.interceptor.InvocationContext; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.StringAsset; @@ -23,9 +33,9 @@ public class ExcludeTypesTest { static final QuarkusUnitTest config = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(ExcludeTypesTest.class, Pong.class, Alpha.class, Bravo.class, Charlie.class, Bar.class, - Baz.class) + Baz.class, Magic.class, MagicInterceptor.class) .addAsResource(new StringAsset( - "quarkus.arc.exclude-types=Alpha,io.quarkus.arc.test.exclude.Bravo,io.quarkus.arc.test.exclude.bar.*,,io.quarkus.arc.test.exclude.baz.**"), + "quarkus.arc.exclude-types=Alpha,io.quarkus.arc.test.exclude.Bravo,io.quarkus.arc.test.exclude.bar.*,,io.quarkus.arc.test.exclude.baz.**,MagicInterceptor"), "application.properties")); @Inject @@ -51,6 +61,7 @@ public String ping() { @ApplicationScoped static class Charlie implements Pong { + @Magic public String ping() { return "charlie"; } @@ -63,4 +74,22 @@ public interface Pong { } + @Magic + @Interceptor + public static class MagicInterceptor { + + @AroundInvoke + public Object toUpperCase(InvocationContext context) throws Exception { + return context.proceed().toString().toUpperCase(); + } + + } + + @InterceptorBinding + @Target({ TYPE, METHOD }) + @Retention(RUNTIME) + @interface Magic { + + } + } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/log/InjectedLoggerTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/log/InjectedLoggerTest.java new file mode 100644 index 0000000000000..24e0bcb3a9039 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/log/InjectedLoggerTest.java @@ -0,0 +1,82 @@ +package io.quarkus.arc.test.log; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.context.Dependent; +import javax.inject.Inject; + +import org.jboss.logging.Logger; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.log.LoggerName; +import io.quarkus.test.QuarkusUnitTest; + +public class InjectedLoggerTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(SimpleBean.class)); + + @Inject + SimpleBean simpleBean; + + @Inject + 
AnotherSimpleBean anotherSimpleBean; + + @Test + public void testInjectedLogger() { + assertEquals(SimpleBean.class.getName(), simpleBean.getLog().getName()); + assertEquals("shared", simpleBean.getSharedLog().getName()); + assertEquals(AnotherSimpleBean.class.getName(), anotherSimpleBean.getLog().getName()); + assertEquals(simpleBean.getSharedLog(), anotherSimpleBean.getSharedLog()); + } + + @ApplicationScoped + static class SimpleBean { + + @Inject + Logger log; + + @LoggerName("shared") + Logger sharedLog; + + public Logger getLog() { + log.info("Someone is here!"); + return log; + } + + public Logger getSharedLog() { + return sharedLog; + } + + } + + @Dependent + static class AnotherSimpleBean { + + private final Logger log; + + @LoggerName("shared") + Logger sharedLog; + + public AnotherSimpleBean(Logger log) { + this.log = log; + } + + public Logger getLog() { + return log; + } + + public Logger getSharedLog() { + sharedLog.info("Yet another someone is here!"); + return sharedLog; + } + + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/observer/SyntheticObserverTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/observer/SyntheticObserverTest.java index 24583e5471241..e784c3dc94176 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/observer/SyntheticObserverTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/observer/SyntheticObserverTest.java @@ -10,14 +10,15 @@ import javax.enterprise.event.Observes; import javax.inject.Singleton; +import org.jboss.jandex.DotName; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.arc.Arc; -import io.quarkus.arc.deployment.ObserverRegistrarBuildItem; -import io.quarkus.arc.processor.ObserverRegistrar; +import io.quarkus.arc.deployment.ObserverRegistrationPhaseBuildItem; +import io.quarkus.arc.deployment.ObserverRegistrationPhaseBuildItem.ObserverConfiguratorBuildItem; import io.quarkus.builder.BuildChainBuilder; import io.quarkus.builder.BuildContext; import io.quarkus.builder.BuildStep; @@ -43,21 +44,22 @@ public void accept(BuildChainBuilder builder) { @Override public void execute(BuildContext context) { - context.produce(new ObserverRegistrarBuildItem(new ObserverRegistrar() { - @Override - public void register(RegistrationContext context) { - context.configure().observedType(String.class).notify(mc -> { - ResultHandle events = mc - .readStaticField(FieldDescriptor.of(MyObserver.class, "EVENTS", List.class)); - mc.invokeInterfaceMethod( - MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class), - events, mc.load("synthetic")); - mc.returnValue(null); - }).done(); - } - })); + ObserverRegistrationPhaseBuildItem observerRegistrationPhase = context + .consume(ObserverRegistrationPhaseBuildItem.class); + context.produce(new ObserverConfiguratorBuildItem( + observerRegistrationPhase.getContext().configure() + .beanClass(DotName.createSimple(SyntheticObserverTest.class.getName())) + .observedType(String.class).notify(mc -> { + ResultHandle events = mc + .readStaticField( + FieldDescriptor.of(MyObserver.class, "EVENTS", List.class)); + mc.invokeInterfaceMethod( + MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class), + events, mc.load("synthetic")); + mc.returnValue(null); + }))); } - }).produces(ObserverRegistrarBuildItem.class).build(); + 
}).consumes(ObserverRegistrationPhaseBuildItem.class).produces(ObserverConfiguratorBuildItem.class).build(); } }; } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/SimpleUnlessBuildPropertyTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/SimpleUnlessBuildPropertyTest.java new file mode 100644 index 0000000000000..934bc0b857f4a --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/SimpleUnlessBuildPropertyTest.java @@ -0,0 +1,61 @@ +package io.quarkus.arc.test.properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.properties.UnlessBuildProperty; +import io.quarkus.test.QuarkusUnitTest; + +public class SimpleUnlessBuildPropertyTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(PingService.class, AlphaService.class, BravoService.class)) + .overrideConfigKey("foo.bar", "baz"); + + @Inject + PingService pingService; + + @Test + public void testInjection() { + // AlphaService is ignored + assertEquals(20, pingService.ping()); + } + + interface PingService { + + int ping(); + + } + + @UnlessBuildProperty(name = "foo.bar", stringValue = "baz") + @ApplicationScoped + static class AlphaService implements PingService { + + @Override + public int ping() { + return 10; + } + + } + + @UnlessBuildProperty(name = "foo.bar", stringValue = "qux") + @ApplicationScoped + static class BravoService implements PingService { + + @Override + public int ping() { + return 20; + } + + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/UnlessBuildPropertyTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/UnlessBuildPropertyTest.java new file mode 100644 index 0000000000000..ff4414cd8ddee --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/UnlessBuildPropertyTest.java @@ -0,0 +1,170 @@ +package io.quarkus.arc.test.properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import javax.enterprise.inject.Instance; +import javax.enterprise.inject.Produces; +import javax.enterprise.inject.spi.CDI; +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.DefaultBean; +import io.quarkus.arc.properties.UnlessBuildProperty; +import io.quarkus.test.QuarkusUnitTest; + +public class UnlessBuildPropertyTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(Producer.class, AnotherProducer.class, + GreetingBean.class, PingBean.class, PongBean.class, FooBean.class, BarBean.class)) + .overrideConfigKey("some.prop1", "v1") + .overrideConfigKey("some.prop2", "v2"); + + @Inject + GreetingBean bean; + + @Inject + PingBean ping; + + @Inject + PongBean pongBean; + + @Inject + FooBean 
fooBean; + + @Inject + Instance barBean; + + @Test + public void testInjection() { + assertEquals("hello from matching prop. Foo is: foo from missing prop", bean.greet()); + assertEquals("ping", ping.ping()); + assertEquals("pong", pongBean.pong()); + assertEquals("foo from missing prop", fooBean.foo()); + assertTrue(barBean.isUnsatisfied()); + } + + @Test + public void testSelect() { + assertEquals("hello from matching prop. Foo is: foo from missing prop", + CDI.current().select(GreetingBean.class).get().greet()); + } + + @DefaultBean + @Singleton + static class GreetingBean { + + String greet() { + return "hola"; + } + } + + @DefaultBean + @Singleton + static class PingBean { + + String ping() { + return "ping"; + } + } + + @UnlessBuildProperty(name = "some.prop1", stringValue = "v1") // won't be enabled because the values don't match + @Singleton + static class PongBean { + + String pong() { + return "pong from non matching value"; + } + } + + static interface FooBean { + + String foo(); + } + + @UnlessBuildProperty(name = "some.other.prop1", stringValue = "v2") + static class BarBean { + + } + + @Singleton + static class Producer { + + @Produces + @UnlessBuildProperty(name = "some.prop2", stringValue = "v2", enableIfMissing = true) // won't be enabled because the property values match - enableIfMissing has no effect when the property does exist + PingBean nonMatchingPingBean; + + public Producer() { + this.nonMatchingPingBean = new PingBean() { + + @Override + String ping() { + return "ping dev"; + } + + }; + } + + @Produces + @UnlessBuildProperty(name = "some.prop1", stringValue = "v") + GreetingBean matchingValueGreetingBean(FooBean fooBean) { + return new GreetingBean() { + + @Override + String greet() { + return "hello from matching prop. Foo is: " + fooBean.foo(); + } + + }; + } + + @Produces + @UnlessBuildProperty(name = "some.prop2", stringValue = "v2") // won't be enabled because the property values match + GreetingBean nonMatchingValueGreetingBean(BarBean barBean) { + return new GreetingBean() { + + @Override + String greet() { + return "hello from dev"; + } + + }; + } + + @Produces + @DefaultBean + PongBean defaultPongBean() { + return new PongBean() { + @Override + String pong() { + return "pong"; + } + }; + } + + } + + @UnlessBuildProperty(name = "some.other.prop1", stringValue = "v", enableIfMissing = true) + static class AnotherProducer { + + @Produces + FooBean testFooBean; + + public AnotherProducer() { + testFooBean = new FooBean() { + @Override + public String foo() { + return "foo from missing prop"; + } + }; + } + } +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/qualifiers/QualifierRegistrarTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/qualifiers/QualifierRegistrarTest.java new file mode 100644 index 0000000000000..b917b9d66659e --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/qualifiers/QualifierRegistrarTest.java @@ -0,0 +1,90 @@ +package io.quarkus.arc.test.qualifiers; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE; +import static java.lang.annotation.RetentionPolicy.RUNTIME; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.lang.annotation.Retention; +import java.lang.annotation.Target; +import java.util.Collections; 
+import java.util.Map; +import java.util.Set; + +import javax.enterprise.context.Dependent; +import javax.enterprise.inject.Instance; +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.jboss.jandex.DotName; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.deployment.QualifierRegistrarBuildItem; +import io.quarkus.arc.processor.QualifierRegistrar; +import io.quarkus.builder.BuildContext; +import io.quarkus.builder.BuildStep; +import io.quarkus.test.QuarkusUnitTest; + +public class QualifierRegistrarTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(NotAQualifier.class, SimpleBean.class, Client.class)) + .addBuildChainCustomizer(b -> { + b.addBuildStep(new BuildStep() { + @Override + public void execute(BuildContext context) { + context.produce(new QualifierRegistrarBuildItem(new QualifierRegistrar() { + @Override + public Map> getAdditionalQualifiers() { + return Collections.singletonMap(DotName.createSimple(NotAQualifier.class.getName()), null); + } + })); + } + }).produces(QualifierRegistrarBuildItem.class).build(); + }); + + @Inject + Client client; + + @Test + public void testQualifier() { + assertTrue(client.instance.isUnsatisfied()); + assertEquals("PONG", client.simpleBean.ping()); + } + + @Dependent + static class Client { + + @NotAQualifier + SimpleBean simpleBean; + + @Inject + Instance instance; + + } + + @NotAQualifier + @Singleton + static class SimpleBean { + + public String ping() { + return "PONG"; + } + + } + + @Target({ TYPE, METHOD, FIELD, PARAMETER }) + @Retention(RUNTIME) + @interface NotAQualifier { + + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/MultipleAddMissingNoargsConstructorTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/MultipleAddMissingNoargsConstructorTest.java new file mode 100644 index 0000000000000..b8e3249a45f23 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/MultipleAddMissingNoargsConstructorTest.java @@ -0,0 +1,68 @@ +package io.quarkus.arc.test.unproxyable; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.context.Dependent; +import javax.enterprise.inject.Instance; +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class MultipleAddMissingNoargsConstructorTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(MyBean.class, MyBeanProducer.class)); + + @Inject + Instance myBeanInstances; + + @Test + public void testBeansProperlyCreated() { + List vals = new ArrayList<>(2); + for (MyBean myBeanInstance : myBeanInstances) { + vals.add(myBeanInstance.getVal()); + } + Collections.sort(vals); + Assertions.assertEquals(Arrays.asList("val1", "val2"), vals); + } + + static class MyBean { + + private final String 
val; + + MyBean(String val) { + this.val = val; + } + + public String getVal() { + return val; + } + } + + @Dependent + static class MyBeanProducer { + + @ApplicationScoped + public MyBean myBean1() { + return new MyBean("val1"); + } + + @ApplicationScoped + public MyBean myBean2() { + return new MyBean("val2"); + } + } + +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/SynthProxiableBeanWithoutNoArgConstructorTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/SynthProxiableBeanWithoutNoArgConstructorTest.java new file mode 100644 index 0000000000000..da72411db6389 --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/SynthProxiableBeanWithoutNoArgConstructorTest.java @@ -0,0 +1,79 @@ +package io.quarkus.arc.test.unproxyable; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.function.Consumer; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.Vetoed; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.InstanceHandle; +import io.quarkus.arc.deployment.SyntheticBeanBuildItem; +import io.quarkus.builder.BuildChainBuilder; +import io.quarkus.builder.BuildContext; +import io.quarkus.builder.BuildStep; +import io.quarkus.gizmo.MethodDescriptor; +import io.quarkus.gizmo.ResultHandle; +import io.quarkus.test.QuarkusUnitTest; + +public class SynthProxiableBeanWithoutNoArgConstructorTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(SynthBean.class)) + .addBuildChainCustomizer(buildCustomizer()); + + static Consumer buildCustomizer() { + return new Consumer() { + + @Override + public void accept(BuildChainBuilder builder) { + builder.addBuildStep(new BuildStep() { + + @Override + public void execute(BuildContext context) { + context.produce(SyntheticBeanBuildItem.configure(SynthBean.class) + .scope(ApplicationScoped.class) + .types(SynthBean.class) + .unremovable() + .creator(mc -> { + ResultHandle ret = mc.newInstance( + MethodDescriptor.ofConstructor(SynthBean.class, String.class), + mc.load("foo")); + mc.returnValue(ret); + }) + .done()); + } + }).produces(SyntheticBeanBuildItem.class).build(); + } + }; + } + + @Test + public void testSyntheticBean() { + InstanceHandle instance = Arc.container().instance(SynthBean.class); + assertTrue(instance.isAvailable()); + assertEquals("foo", instance.get().getString()); + } + + @Vetoed + static class SynthBean { + private String s; + + public SynthBean(String s) { + this.s = s; + } + + public String getString() { + return s; + } + } +} diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/UnusedExclusionTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/UnusedExclusionTest.java index 22bfca5a9c46e..ce03e4d9ede99 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/UnusedExclusionTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/UnusedExclusionTest.java @@ -1,6 +1,12 @@ package io.quarkus.arc.test.unused; +import java.lang.reflect.Method; +import java.util.function.Consumer; + import 
javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.Produces; +import javax.enterprise.inject.spi.BeanManager; +import javax.inject.Inject; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.StringAsset; @@ -12,7 +18,15 @@ import io.quarkus.arc.Arc; import io.quarkus.arc.ArcContainer; import io.quarkus.arc.InstanceHandle; +import io.quarkus.arc.deployment.BeanContainerBuildItem; +import io.quarkus.arc.runtime.BeanContainer; import io.quarkus.arc.test.unused.subpackage.Beta; +import io.quarkus.builder.BuildChainBuilder; +import io.quarkus.builder.BuildContext; +import io.quarkus.builder.BuildStep; +import io.quarkus.deployment.builditem.StaticBytecodeRecorderBuildItem; +import io.quarkus.deployment.recording.BytecodeRecorderImpl; +import io.quarkus.runtime.annotations.Recorder; import io.quarkus.test.QuarkusUnitTest; public class UnusedExclusionTest { @@ -21,13 +35,56 @@ public class UnusedExclusionTest { static final QuarkusUnitTest config = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(UnusedExclusionTest.class, Alpha.class, Beta.class, Charlie.class, Delta.class, - ProducerBean.class) + ProducerBean.class, TestRecorder.class, Gama.class, GamaProducer.class) .addAsResource(new StringAsset( "quarkus.arc.unremovable-types=io.quarkus.arc.test.unused.UnusedExclusionTest$Alpha,io.quarkus.arc.test.unused.subpackage.**,io.quarkus.arc.test.unused.Charlie,Delta"), - "application.properties")); + "application.properties")) + .addBuildChainCustomizer(buildCustomizer()); + + static Consumer buildCustomizer() { + return new Consumer() { + + @Override + public void accept(BuildChainBuilder builder) { + builder.addBuildStep(new BuildStep() { + + @Override + public void execute(BuildContext context) { + BeanContainer beanContainer = context.consume(BeanContainerBuildItem.class).getValue(); + BytecodeRecorderImpl bytecodeRecorder = new BytecodeRecorderImpl(true, + TestRecorder.class.getSimpleName(), + "test", "" + TestRecorder.class.hashCode()); + // We need to use reflection due to some class loading problems + Object recorderProxy = bytecodeRecorder.getRecordingProxy(TestRecorder.class); + try { + Method test = recorderProxy.getClass().getDeclaredMethod("test", BeanContainer.class); + Object[] args = new Object[1]; + args[0] = beanContainer; + test.invoke(recorderProxy, args); + } catch (Exception e) { + throw new RuntimeException(e); + } + context.produce(new StaticBytecodeRecorderBuildItem(bytecodeRecorder)); + } + }).consumes(BeanContainerBuildItem.class).produces(StaticBytecodeRecorderBuildItem.class).build(); + } + }; + } + + @Recorder + public static class TestRecorder { + + public void test(BeanContainer beanContainer) { + // This should trigger the warning - Gama was removed + Gama gama = beanContainer.instance(Gama.class); + // Test that fallback was used - no injection was performed + Assertions.assertNull(gama.beanManager); + } + + } @Test - public void testBeansWereNotRemoved() { + public void testBeans() { ArcContainer container = Arc.container(); String expectedBeanResponse = "ok"; InstanceHandle alphaInstance = container.instance(Alpha.class); @@ -45,6 +102,8 @@ public void testBeansWereNotRemoved() { InstanceHandle deltaInstance = container.instance(Delta.class); Assertions.assertTrue(deltaInstance.isAvailable()); Assertions.assertEquals(expectedBeanResponse, deltaInstance.get().ping()); + + Assertions.assertFalse(container.instance(Gama.class).isAvailable()); } // unused bean, won't 
be removed @@ -56,4 +115,26 @@ public String ping() { } } + + // unused bean, will be removed + @ApplicationScoped + public static class Gama { + + @Inject + BeanManager beanManager; + + public String ping() { + return "ok"; + } + + } + + public static class GamaProducer { + + @Produces + public Gama ping() { + return new Gama(); + } + + } } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/wrongsingleton/WrongSingletonTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/wrongsingleton/WrongSingletonTest.java new file mode 100644 index 0000000000000..0932bf4299a3b --- /dev/null +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/wrongsingleton/WrongSingletonTest.java @@ -0,0 +1,60 @@ +package io.quarkus.arc.test.wrongsingleton; + +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Collections; +import java.util.List; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.Produces; +import javax.inject.Inject; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.runtime.util.ExceptionUtil; +import io.quarkus.test.QuarkusUnitTest; + +public class WrongSingletonTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(EjbSingleton.class)) + .assertException(t -> { + Throwable rootCause = ExceptionUtil.getRootCause(t); + assertTrue(rootCause.getMessage().contains("javax.ejb.Singleton"), t.toString()); + assertTrue(rootCause.getMessage().contains("com.google.inject.Singleton"), t.toString()); + }); + + @Test + public void testValidationFailed() { + // This method should not be invoked + fail(); + } + + @javax.ejb.Singleton + static class EjbSingleton { + + @Inject + @ConfigProperty(name = "unconfigured") + String foo; + + } + + @ApplicationScoped + static class GuiceProducers { + + @com.google.inject.Singleton + @Produces + List produceEjbSingleton() { + return Collections.emptyList(); + } + + } + +} diff --git a/extensions/arc/pom.xml b/extensions/arc/pom.xml index 5126cd7c61387..455b95ec65d3a 100644 --- a/extensions/arc/pom.xml +++ b/extensions/arc/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/arc/runtime/pom.xml b/extensions/arc/runtime/pom.xml index 6bec3f5e8c949..ae1c08899981d 100644 --- a/extensions/arc/runtime/pom.xml +++ b/extensions/arc/runtime/pom.xml @@ -6,7 +6,6 @@ quarkus-arc-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/log/LoggerName.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/log/LoggerName.java new file mode 100644 index 0000000000000..81644222f3b80 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/log/LoggerName.java @@ -0,0 +1,48 @@ +package io.quarkus.arc.log; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static 
java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import javax.enterprise.util.AnnotationLiteral; +import javax.enterprise.util.Nonbinding; +import javax.inject.Qualifier; + +@Qualifier +@Retention(RUNTIME) +@Target({ FIELD, PARAMETER, METHOD }) +public @interface LoggerName { + + /** + * The logger name must not be empty. + * + * @return the logger name + */ + @Nonbinding + String value(); + + /** + * Supports inline instantiation of this qualifier. + */ + public static final class Literal extends AnnotationLiteral implements LoggerName { + + private static final long serialVersionUID = 1L; + + private final String value; + + public Literal(String value) { + this.value = value; + } + + @Override + public String value() { + return value; + } + + } + +} \ No newline at end of file diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/profile/UnlessBuildProfile.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/profile/UnlessBuildProfile.java index 1e9a495ab6740..f6f3951c37886 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/profile/UnlessBuildProfile.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/profile/UnlessBuildProfile.java @@ -7,7 +7,7 @@ /** * When applied to a bean class or producer method (or field), the bean will only be enabled - * if the Quarkus build time profile dot not match the specified annotation value. + * if the Quarkus build time profile does not match the specified annotation value. */ @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.METHOD, ElementType.TYPE, ElementType.FIELD }) diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/IfBuildProperty.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/IfBuildProperty.java index 5e83e6b495873..186357615e702 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/IfBuildProperty.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/IfBuildProperty.java @@ -8,6 +8,7 @@ /** * When applied to a bean class or producer method (or field), the bean will only be enabled * if the Quarkus build time property matches the provided value. + *
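The `@LoggerName` qualifier added a few hunks above is exercised by `InjectedLoggerTest` earlier in this diff. A minimal injection sketch is shown below; the `ReportingService` bean and its log messages are hypothetical and not part of this patch.

```java
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import org.jboss.logging.Logger;

import io.quarkus.arc.log.LoggerName;

@ApplicationScoped
public class ReportingService {

    // Logger named after the declaring class, as asserted in InjectedLoggerTest
    @Inject
    Logger log;

    // Logger with the explicit name "shared"; InjectedLoggerTest declares the field the
    // same way, with the qualifier alone and no additional @Inject
    @LoggerName("shared")
    Logger sharedLog;

    void report() {
        log.info("generating report");
        sharedLog.info("generating report (shared logger)");
    }
}
```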

    * By default, the bean is not enabled when the build time property is not defined at all, but this behavior is configurable * via the {@code enableIfMissing} property. */ } diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/UnlessBuildProperty.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/UnlessBuildProperty.java new file mode 100644 index 0000000000000..9abd62c7c5192 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/properties/UnlessBuildProperty.java @@ -0,0 +1,33 @@ +package io.quarkus.arc.properties; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * When applied to a bean class or producer method (or field), the bean will only be enabled + * if the Quarkus build time property does not match the provided value. + *

    + * By default, the bean is not enabled when the build time property is not defined at all, but this behavior is configurable + * via the {@code enableIfMissing} property. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.TYPE, ElementType.FIELD }) +public @interface UnlessBuildProperty { + + /** + * Name of the build time property to check + */ + String name(); + + /** + * The bean is enabled if the build time property (specified by {@code name}) does not match this value. + */ + String stringValue(); + + /** + * Determines if the bean is enabled when the property name specified by {@code name} has not been specified at all + */ + boolean enableIfMissing() default false; +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/AdditionalBean.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/AdditionalBean.java index 54747f84b708f..4b5dabc4c4b33 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/AdditionalBean.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/AdditionalBean.java @@ -6,10 +6,14 @@ import java.lang.annotation.Retention; import java.lang.annotation.Target; +import javax.enterprise.context.Dependent; import javax.enterprise.inject.Stereotype; /** - * This built-in stereotype is automatically added to all additional beans. + * This built-in stereotype is automatically added to all additional beans that do not have a scope annotation declared. + *
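A minimal usage sketch for the two build-time property annotations (illustrative only, not part of the patch: the GreetingConfiguration bean, the Greeting interface and the greeting.fancy property are invented, and @IfBuildProperty is assumed to expose the same name/stringValue/enableIfMissing members as @UnlessBuildProperty above):

package org.acme;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;

import io.quarkus.arc.properties.IfBuildProperty;
import io.quarkus.arc.properties.UnlessBuildProperty;

@ApplicationScoped
public class GreetingConfiguration {

    // Enabled only when the build-time property greeting.fancy equals "true"
    @Produces
    @IfBuildProperty(name = "greeting.fancy", stringValue = "true")
    Greeting fancyGreeting() {
        return () -> "*** hello ***";
    }

    // Enabled when greeting.fancy has any other value; enableIfMissing = true also
    // enables it when the property is not defined at all
    @Produces
    @UnlessBuildProperty(name = "greeting.fancy", stringValue = "true", enableIfMissing = true)
    Greeting plainGreeting() {
        return () -> "hello";
    }

    // Invented helper type used only by this sketch
    public interface Greeting {
        String message();
    }
}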

    + * Note that stereotypes are bean defining annotations and so bean classes annotated with this stereotype but no scope have + * their scope defaulted to {@link Dependent}. */ @Stereotype @Target({ TYPE }) diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcContainerSupplier.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcContainerSupplier.java new file mode 100644 index 0000000000000..ede27fd340226 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcContainerSupplier.java @@ -0,0 +1,13 @@ +package io.quarkus.arc.runtime; + +import java.util.function.Supplier; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.ArcContainer; + +public class ArcContainerSupplier implements Supplier { + @Override + public ArcContainer get() { + return Arc.container(); + } +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java index b4acc0f73cc70..82de993b4c258 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ArcRecorder.java @@ -1,36 +1,41 @@ package io.quarkus.arc.runtime; -import java.lang.annotation.Annotation; -import java.util.Arrays; -import java.util.Collection; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; +import java.util.function.Predicate; import java.util.function.Supplier; import org.jboss.logging.Logger; import io.quarkus.arc.Arc; import io.quarkus.arc.ArcContainer; -import io.quarkus.arc.InstanceHandle; -import io.quarkus.arc.ManagedContext; +import io.quarkus.arc.InjectableBean; +import io.quarkus.arc.InjectableBean.Kind; +import io.quarkus.arc.impl.ArcContainerImpl; +import io.quarkus.arc.runtime.test.PreloadedTestApplicationClassPredicate; +import io.quarkus.runtime.LaunchMode; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.ShutdownContext; +import io.quarkus.runtime.ShutdownEvent; +import io.quarkus.runtime.StartupEvent; import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.runtime.test.TestApplicationClassPredicate; -/** - */ @Recorder public class ArcRecorder { + private static final Logger LOG = Logger.getLogger(ArcRecorder.class); + /** * Used to hold the Supplier instances used for synthetic bean declarations. */ public static volatile Map> supplierMap; - private static final Logger LOGGER = Logger.getLogger(ArcRecorder.class.getName()); - public ArcContainer getContainer(ShutdownContext shutdown) throws Exception { ArcContainer container = Arc.initialize(); shutdown.addShutdownTask(new Runnable() { @@ -54,67 +59,47 @@ public void initRuntimeSupplierBeans(Map> beans) { supplierMap.putAll(beans); } - public BeanContainer initBeanContainer(ArcContainer container, List listeners, - Collection removedBeanTypes) + public BeanContainer initBeanContainer(ArcContainer container, List listeners) throws Exception { - if (container == null) { throw new IllegalArgumentException("Arc container was null"); } - BeanContainer beanContainer = new BeanContainer() { - @Override - public Factory instanceFactory(Class type, Annotation... 
qualifiers) { - Supplier> handleSupplier = container.instanceSupplier(type, qualifiers); - if (handleSupplier == null) { - if (removedBeanTypes.contains(type.getName())) { - // Note that this only catches the simplest use cases - LOGGER.warnf( - "Bean matching %s was marked as unused and removed during build.\nExtensions can eliminate false positives using:\n\t- a custom UnremovableBeanBuildItem\n\t- AdditionalBeanBuildItem(false, beanClazz)", - type); - } else { - LOGGER.debugf( - "No matching bean found for type %s and qualifiers %s. The bean might have been marked as unused and removed during build.", - type, Arrays.toString(qualifiers)); - } - return new DefaultInstanceFactory<>(type); - } - return new Factory() { - @Override - public Instance create() { - InstanceHandle handle = handleSupplier.get(); - return new Instance() { - @Override - public T get() { - return handle.get(); - } - - @Override - public void close() { - handle.close(); - } - }; - } - }; - } - - @Override - public ManagedContext requestContext() { - return container.requestContext(); - } - }; + BeanContainer beanContainer = new BeanContainerImpl(container); for (BeanContainerListener listener : listeners) { + long start = System.currentTimeMillis(); listener.created(beanContainer); + LOG.debugf("Bean container listener %s finished in %s ms", listener.getClass().getName(), + System.currentTimeMillis() - start); } return beanContainer; } - public void handleLifecycleEvents(ShutdownContext context, BeanContainer beanContainer) { - LifecycleEventRunner instance = beanContainer.instance(LifecycleEventRunner.class); - instance.fireStartupEvent(); + public void handleLifecycleEvents(ShutdownContext context, LaunchMode launchMode, + boolean disableApplicationLifecycleObservers) { + ArcContainerImpl container = ArcContainerImpl.instance(); + List> mockBeanClasses; + + // If needed then mock all app observers in the test mode + if (launchMode == LaunchMode.TEST && disableApplicationLifecycleObservers) { + Predicate predicate = container + .select(TestApplicationClassPredicate.class).get(); + mockBeanClasses = new ArrayList<>(); + for (InjectableBean bean : container.getBeans()) { + // Mock observers for all application class beans + if (bean.getKind() == Kind.CLASS && predicate.test(bean.getBeanClass().getName())) { + mockBeanClasses.add(bean.getBeanClass()); + } + } + } else { + mockBeanClasses = Collections.emptyList(); + } + + fireLifecycleEvent(container, new StartupEvent(), mockBeanClasses); + context.addShutdownTask(new Runnable() { @Override public void run() { - instance.fireShutdownEvent(); + fireLifecycleEvent(container, new ShutdownEvent(), mockBeanClasses); } }); } @@ -128,35 +113,22 @@ public Object get() { }; } - public BeanContainerListener initCommandLineArgs(Supplier args) { - return new BeanContainerListener() { - @Override - public void created(BeanContainer container) { - container.instance(CommandLineArgumentsProducer.class).setCommandLineArgs(args); - } - }; + public void initTestApplicationClassPredicate(Set applicationBeanClasses) { + PreloadedTestApplicationClassPredicate predicate = Arc.container() + .instance(PreloadedTestApplicationClassPredicate.class).get(); + predicate.setApplicationBeanClasses(applicationBeanClasses); } - private static final class DefaultInstanceFactory implements BeanContainer.Factory { - - final Class type; - - private DefaultInstanceFactory(Class type) { - this.type = type; + private void fireLifecycleEvent(ArcContainerImpl container, Object event, List> mockBeanClasses) { + 
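For context, a sketch of the application-side observers that the reworked handleLifecycleEvents() notifies through the container (the AppLifecycle bean and its method names are invented for illustration):

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;

import io.quarkus.runtime.ShutdownEvent;
import io.quarkus.runtime.StartupEvent;

@ApplicationScoped
public class AppLifecycle {

    void onStart(@Observes StartupEvent event) {
        // notified when the recorder fires new StartupEvent()
    }

    void onStop(@Observes ShutdownEvent event) {
        // notified from the shutdown task registered on the ShutdownContext
    }
}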
if (!mockBeanClasses.isEmpty()) { + for (Class beanClass : mockBeanClasses) { + container.mockObserversFor(beanClass, true); + } } - - @Override - public BeanContainer.Instance create() { - try { - T instance = type.newInstance(); - return new BeanContainer.Instance() { - @Override - public T get() { - return instance; - } - }; - } catch (InstantiationException | IllegalAccessException e) { - throw new RuntimeException(e); + container.beanManager().getEvent().fire(event); + if (!mockBeanClasses.isEmpty()) { + for (Class beanClass : mockBeanClasses) { + container.mockObserversFor(beanClass, false); } } } diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanContainerImpl.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanContainerImpl.java new file mode 100644 index 0000000000000..e01fc9f2e5c5e --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanContainerImpl.java @@ -0,0 +1,80 @@ +package io.quarkus.arc.runtime; + +import java.lang.annotation.Annotation; +import java.util.Arrays; +import java.util.function.Supplier; + +import org.jboss.logging.Logger; + +import io.quarkus.arc.ArcContainer; +import io.quarkus.arc.InstanceHandle; +import io.quarkus.arc.ManagedContext; + +public class BeanContainerImpl implements BeanContainer { + + private static final Logger LOGGER = Logger.getLogger(BeanContainerImpl.class.getName()); + + private final ArcContainer container; + + public BeanContainerImpl(ArcContainer container) { + this.container = container; + } + + @Override + public Factory instanceFactory(Class type, Annotation... qualifiers) { + Supplier> handleSupplier = container.instanceSupplier(type, qualifiers); + if (handleSupplier == null) { + LOGGER.debugf( + "No matching bean found for type %s and qualifiers %s. 
The bean might have been marked as unused and removed during build.", + type, Arrays.toString(qualifiers)); + return new DefaultInstanceFactory<>(type); + } + return new Factory() { + @Override + public Instance create() { + InstanceHandle handle = handleSupplier.get(); + return new Instance() { + @Override + public T get() { + return handle.get(); + } + + @Override + public void close() { + handle.close(); + } + }; + } + }; + } + + @Override + public ManagedContext requestContext() { + return container.requestContext(); + } + + static final class DefaultInstanceFactory implements BeanContainer.Factory { + + final Class type; + + DefaultInstanceFactory(Class type) { + this.type = type; + } + + @Override + public BeanContainer.Instance create() { + try { + T instance = type.newInstance(); + return new BeanContainer.Instance() { + @Override + public T get() { + return instance; + } + }; + } catch (InstantiationException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanInvoker.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanInvoker.java index d0a0741e3e436..ed4c248a87b93 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanInvoker.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanInvoker.java @@ -10,7 +10,7 @@ */ public interface BeanInvoker { - default void invoke(T param) { + default void invoke(T param) throws Exception { ManagedContext requestContext = Arc.container().requestContext(); if (requestContext.isActive()) { invokeBean(param); @@ -24,6 +24,6 @@ default void invoke(T param) { } } - void invokeBean(T param); + void invokeBean(T param) throws Exception; } diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanLookupSupplier.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanLookupSupplier.java new file mode 100644 index 0000000000000..26543219e879f --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/BeanLookupSupplier.java @@ -0,0 +1,31 @@ +package io.quarkus.arc.runtime; + +import java.util.function.Supplier; + +import io.quarkus.arc.Arc; + +public class BeanLookupSupplier implements Supplier { + + private Class type; + + public BeanLookupSupplier() { + } + + public BeanLookupSupplier(Class type) { + this.type = type; + } + + public Class getType() { + return type; + } + + public BeanLookupSupplier setType(Class type) { + this.type = type; + return this; + } + + @Override + public Object get() { + return Arc.container().instance(type).get(); + } +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/CommandLineArgumentsProducer.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/CommandLineArgumentsProducer.java deleted file mode 100644 index 026285d4e8eac..0000000000000 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/CommandLineArgumentsProducer.java +++ /dev/null @@ -1,24 +0,0 @@ -package io.quarkus.arc.runtime; - -import java.util.function.Supplier; - -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; - -import io.quarkus.runtime.annotations.CommandLineArguments; - -@ApplicationScoped -public class CommandLineArgumentsProducer { - - private volatile Supplier commandLineArgs; - - @Produces - @CommandLineArguments - public String[] getCommandLineArgs() { - return commandLineArgs.get(); - } - - public void setCommandLineArgs(Supplier 
commandLineArgs) { - this.commandLineArgs = commandLineArgs; - } -} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigMappingCreator.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigMappingCreator.java new file mode 100644 index 0000000000000..b038d663a18e6 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigMappingCreator.java @@ -0,0 +1,19 @@ +package io.quarkus.arc.runtime; + +import java.util.Map; + +import javax.enterprise.context.spi.CreationalContext; + +import org.eclipse.microprofile.config.ConfigProvider; + +import io.quarkus.arc.BeanCreator; +import io.smallrye.config.SmallRyeConfig; + +public class ConfigMappingCreator implements BeanCreator { + @Override + public Object create(CreationalContext creationalContext, Map params) { + Class interfaceType = (Class) params.get("type"); + SmallRyeConfig config = (SmallRyeConfig) ConfigProvider.getConfig(); + return config.getConfigMapping(interfaceType); + } +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigRecorder.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigRecorder.java index 00f4a4398559e..59bce34c1a713 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigRecorder.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigRecorder.java @@ -10,6 +10,8 @@ import org.eclipse.microprofile.config.ConfigProvider; import io.quarkus.runtime.annotations.Recorder; +import io.smallrye.config.ConfigMappings; +import io.smallrye.config.SmallRyeConfig; /** * @author Martin Kouba @@ -44,6 +46,12 @@ public void validateConfigProperties(Map> properties) { } } + public void registerConfigMappings(final Set configMappingsWithPrefix) + throws Exception { + SmallRyeConfig config = (SmallRyeConfig) ConfigProvider.getConfig(); + ConfigMappings.registerConfigMappings(config, configMappingsWithPrefix); + } + private Class load(String className, ClassLoader cl) { switch (className) { case "boolean": diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/LifecycleEventRunner.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/LifecycleEventRunner.java deleted file mode 100644 index 1708172ba4d4d..0000000000000 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/LifecycleEventRunner.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.arc.runtime; - -import javax.enterprise.context.Dependent; -import javax.enterprise.event.Event; -import javax.inject.Inject; - -import io.quarkus.runtime.ShutdownEvent; -import io.quarkus.runtime.StartupEvent; - -/** - * - */ -@Dependent -public class LifecycleEventRunner { - - @Inject - Event startup; - - @Inject - Event shutdown; - - public void fireStartupEvent() { - startup.fire(new StartupEvent()); - } - - public void fireShutdownEvent() { - shutdown.fire(new ShutdownEvent()); - } - -} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/LoggerProducer.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/LoggerProducer.java new file mode 100644 index 0000000000000..5a8b929bc3e95 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/LoggerProducer.java @@ -0,0 +1,49 @@ +package io.quarkus.arc.runtime; + +import java.lang.annotation.Annotation; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import javax.annotation.PreDestroy; +import javax.enterprise.context.Dependent; +import 
javax.enterprise.inject.Produces; +import javax.enterprise.inject.spi.InjectionPoint; +import javax.inject.Singleton; + +import org.jboss.logging.Logger; + +import io.quarkus.arc.log.LoggerName; + +@Singleton +public class LoggerProducer { + + private final ConcurrentMap loggers = new ConcurrentHashMap<>(); + + @Dependent + @Produces + Logger getSimpleLogger(InjectionPoint injectionPoint) { + return loggers.computeIfAbsent(injectionPoint.getMember().getDeclaringClass().getName(), Logger::getLogger); + } + + @LoggerName("") + @Dependent + @Produces + Logger getLoggerWithCustomName(InjectionPoint injectionPoint) { + String name = null; + for (Annotation qualifier : injectionPoint.getQualifiers()) { + if (qualifier.annotationType().equals(LoggerName.class)) { + name = ((LoggerName) qualifier).value(); + } + } + if (name == null || name.isEmpty()) { + throw new IllegalStateException("Unable to derive the logger name at " + injectionPoint); + } + return loggers.computeIfAbsent(name, Logger::getLogger); + } + + @PreDestroy + void destroy() { + loggers.clear(); + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/context/ArcContextProvider.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/context/ArcContextProvider.java index 5537feb0935ad..92941faaa6148 100644 --- a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/context/ArcContextProvider.java +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/context/ArcContextProvider.java @@ -66,12 +66,6 @@ public String getThreadContextType() { return ThreadContext.CDI; } - // see ThreadContextController#endContext() - private static void deactivateRequestContext() throws IllegalStateException { - // Only deactivate the context - Arc.container().requestContext().deactivate(); - } - private static final class ClearContextSnapshot implements ThreadContextSnapshot { @Override @@ -92,7 +86,7 @@ public ThreadContextController begin() { } else { // context not active, activate blank one, deactivate afterwards requestContext.activate(); - return ArcContextProvider::deactivateRequestContext; + return requestContext::deactivate; } } } @@ -155,7 +149,7 @@ public ThreadContextController begin() { } else { // context not active, activate and pass it new instance, deactivate afterwards requestContext.activate(state); - return ArcContextProvider::deactivateRequestContext; + return requestContext::deactivate; } } diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/EventsMonitor.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/EventsMonitor.java new file mode 100644 index 0000000000000..72c0cb997ead1 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/EventsMonitor.java @@ -0,0 +1,120 @@ +package io.quarkus.arc.runtime.devconsole; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import javax.enterprise.context.BeforeDestroyed; +import javax.enterprise.context.Destroyed; +import javax.enterprise.context.Initialized; +import javax.enterprise.event.Observes; +import javax.enterprise.inject.Any; +import javax.enterprise.inject.Default; +import javax.enterprise.inject.spi.EventMetadata; +import javax.inject.Singleton; + +@Singleton +public class EventsMonitor { + + private static final int DEFAULT_LIMIT = 500; + + private volatile boolean skipContextEvents = true; 
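A usage sketch for the LoggerProducer and @LoggerName qualifier introduced above (the PaymentService bean and the "payments.audit" logger name are invented for illustration):

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import org.jboss.logging.Logger;

import io.quarkus.arc.log.LoggerName;

@ApplicationScoped
public class PaymentService {

    @Inject
    Logger log; // named after PaymentService, produced by getSimpleLogger()

    @Inject
    @LoggerName("payments.audit")
    Logger auditLog; // explicitly named logger, produced by getLoggerWithCustomName()
}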
+ private final List events = Collections.synchronizedList(new ArrayList(DEFAULT_LIMIT)); + + void notify(@Observes Object payload, EventMetadata eventMetadata) { + if (skipContextEvents && isContextEvent(eventMetadata)) { + return; + } + if (events.size() > DEFAULT_LIMIT) { + // Remove some old data if the limit is exceeded + synchronized (events) { + if (events.size() > DEFAULT_LIMIT) { + events.subList(0, DEFAULT_LIMIT / 2).clear(); + } + } + } + events.add(EventInfo.from(eventMetadata)); + } + + public void clear() { + events.clear(); + } + + public List getLastEvents() { + List result = new ArrayList<>(events); + Collections.reverse(result); + return result; + } + + public boolean isSkipContextEvents() { + return skipContextEvents; + } + + public void toggleSkipContextEvents() { + // This is not thread-safe but we don't expect concurrent actions from dev ui + skipContextEvents = !skipContextEvents; + } + + boolean isContextEvent(EventMetadata eventMetadata) { + if (!eventMetadata.getType().equals(Object.class) || eventMetadata.getQualifiers().size() != 2) { + return false; + } + for (Annotation qualifier : eventMetadata.getQualifiers()) { + Type qualifierType = qualifier.annotationType(); + // @Any, @Initialized, @BeforeDestroyed or @Destroyed + if (!qualifierType.equals(Any.class) && !qualifierType.equals(Initialized.class) + && !qualifierType.equals(BeforeDestroyed.class) && !qualifierType.equals(Destroyed.class)) { + + return false; + } + } + return true; + } + + static class EventInfo { + + static EventInfo from(EventMetadata eventMetadata) { + List qualifiers; + if (eventMetadata.getQualifiers().size() == 1) { + // Just @Any + qualifiers = Collections.emptyList(); + } else { + qualifiers = new ArrayList<>(1); + for (Annotation qualifier : eventMetadata.getQualifiers()) { + // Skip @Any and @Default + if (!qualifier.annotationType().equals(Any.class) && !qualifier.annotationType().equals(Default.class)) { + qualifiers.add(qualifier); + } + } + } + return new EventInfo(eventMetadata.getType(), qualifiers); + } + + private final LocalDateTime timestamp; + private final Type type; + private final List qualifiers; + + EventInfo(Type type, List qualifiers) { + this.timestamp = LocalDateTime.now(); + this.type = type; + this.qualifiers = qualifiers; + } + + public LocalDateTime getTimestamp() { + return timestamp; + } + + public String getType() { + return type.getTypeName(); + } + + public List getQualifiers() { + return qualifiers; + } + + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/Invocation.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/Invocation.java new file mode 100644 index 0000000000000..0ef4947314283 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/Invocation.java @@ -0,0 +1,182 @@ +package io.quarkus.arc.runtime.devconsole; + +import java.lang.reflect.Method; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import io.quarkus.arc.InjectableBean; + +/** + * Business method invocation. 
+ */ +public class Invocation { + + private final InjectableBean interceptedBean; + /** + * Start time in ms + */ + private final long start; + /** + * Duration in ns + */ + private final long duration; + private final Method method; + private final Kind kind; + private final String message; + private final List children; + + Invocation(InjectableBean interceptedBean, long start, long duration, + Method method, Kind kind, String message, List children) { + this.interceptedBean = interceptedBean; + this.start = start; + this.duration = duration; + this.method = method; + this.children = children; + this.kind = kind; + this.message = message; + } + + public InjectableBean getInterceptedBean() { + return interceptedBean; + } + + public long getStart() { + return start; + } + + public String getStartFormatted() { + return LocalDateTime.ofInstant(Instant.ofEpochMilli(start), ZoneId.systemDefault()).toString(); + } + + public long getDuration() { + return duration; + } + + public long getDurationMillis() { + return TimeUnit.NANOSECONDS.toMillis(duration); + } + + public Method getMethod() { + return method; + } + + public String getDeclaringClassName() { + return method.getDeclaringClass().getName(); + } + + public List getChildren() { + return children; + } + + public Kind getKind() { + return kind; + } + + public String getMessage() { + return message; + } + + @Override + public String toString() { + return kind + " invocation of " + method; + } + + public String getPackageName(String name) { + int lastDot = name.lastIndexOf('.'); + if (lastDot != -1) { + return name.substring(0, lastDot); + } + return ""; + } + + public enum Kind { + + BUSINESS, + PRODUCER, + DISPOSER, + OBSERVER + + } + + static class Builder { + + private InjectableBean interceptedBean; + private long start; + private long duration; + private Method method; + private List children; + private Builder parent; + private Kind kind; + private String message; + + Builder newChild() { + Invocation.Builder child = new Builder(); + addChild(child); + return child; + } + + Builder setInterceptedBean(InjectableBean bean) { + this.interceptedBean = bean; + return this; + } + + Builder setStart(long start) { + this.start = start; + return this; + } + + Builder setDuration(long duration) { + this.duration = duration; + return this; + } + + Builder setMethod(Method method) { + this.method = method; + return this; + } + + Builder setKind(Kind kind) { + this.kind = kind; + return this; + } + + Builder getParent() { + return parent; + } + + Builder setParent(Builder parent) { + this.parent = parent; + return this; + } + + Builder setMessage(String message) { + this.message = message; + return this; + } + + boolean addChild(Builder child) { + if (children == null) { + children = new ArrayList<>(); + } + child.setParent(this); + return children.add(child); + } + + Invocation build() { + List invocations = null; + if (children != null) { + invocations = new ArrayList<>(children.size()); + for (Builder builder : children) { + invocations.add(builder.build()); + } + } + return new Invocation(interceptedBean, start, duration, method, kind, message, invocations); + } + + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationInterceptor.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationInterceptor.java new file mode 100644 index 0000000000000..74c96b01fbf2d --- /dev/null +++ 
b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationInterceptor.java @@ -0,0 +1,96 @@ +package io.quarkus.arc.runtime.devconsole; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Method; + +import javax.annotation.Priority; +import javax.enterprise.event.Observes; +import javax.enterprise.event.ObservesAsync; +import javax.enterprise.inject.Disposes; +import javax.enterprise.inject.Instance; +import javax.enterprise.inject.Intercepted; +import javax.enterprise.inject.Produces; +import javax.inject.Inject; +import javax.interceptor.AroundInvoke; +import javax.interceptor.Interceptor; +import javax.interceptor.InvocationContext; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.InjectableBean; +import io.quarkus.arc.ManagedContext; +import io.quarkus.arc.runtime.devconsole.Invocation.Builder; +import io.quarkus.arc.runtime.devconsole.Invocation.Kind; + +@Priority(Interceptor.Priority.LIBRARY_BEFORE) +@Monitored +@Interceptor +public class InvocationInterceptor { + + @Inject + InvocationsMonitor invocationMonitor; + + @Intercepted + InjectableBean bean; + + @Inject + Instance invocationTree; + + @AroundInvoke + public Object monitor(InvocationContext context) throws Exception { + ManagedContext requestContext = Arc.container().requestContext(); + if (requestContext.isActive()) { + InvocationTree tree = invocationTree.get(); + return proceed(tree.invocationStarted(bean, context.getMethod(), getKind(context)), context, requestContext, tree); + } else { + return proceed( + new Builder().setInterceptedBean(bean).setMethod(context.getMethod()).setKind(getKind(context)) + .setStart(System.currentTimeMillis()), + context, requestContext, null); + } + } + + Object proceed(Invocation.Builder builder, InvocationContext context, ManagedContext requestContext, InvocationTree tree) + throws Exception { + long nanoTime = System.nanoTime(); + // Object result; + try { + return context.proceed(); + } catch (Exception e) { + builder.setMessage(e.getMessage()); + throw e; + } finally { + builder.setDuration(System.nanoTime() - nanoTime); + if (builder.getParent() == null) { + // Flush the data about a top-level invocation + invocationMonitor.addInvocation(builder.build()); + } + if (tree != null && requestContext.isActive()) { + tree.invocationCompleted(); + } + } + } + + Invocation.Kind getKind(InvocationContext ctx) { + // This will only work for "unmodified" discovered types + Method method = ctx.getMethod(); + if (!method.getReturnType().equals(Void.TYPE) && method.isAnnotationPresent(Produces.class)) { + return Kind.PRODUCER; + } else if (method.getParameterCount() > 0) { + Annotation[][] parameterAnnotations = method.getParameterAnnotations(); + if (parameterAnnotations.length > 0) { + for (Annotation[] annotations : parameterAnnotations) { + for (Annotation annotation : annotations) { + Class type = annotation.annotationType(); + if (Observes.class.equals(type) || ObservesAsync.class.equals(type)) { + return Kind.OBSERVER; + } else if (Disposes.class.equals(type)) { + return Kind.DISPOSER; + } + } + } + } + } + return Kind.BUSINESS; + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationTree.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationTree.java new file mode 100644 index 0000000000000..6c1afff940609 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationTree.java @@ -0,0 +1,44 @@ +package 
io.quarkus.arc.runtime.devconsole; + +import java.lang.reflect.Method; + +import javax.enterprise.context.RequestScoped; + +import io.quarkus.arc.InjectableBean; + +@RequestScoped +public class InvocationTree { + + // The current invocation builder + // It is volatile because a request scoped bean should not be invoked concurrently, however it can be invoked on a different thread + private volatile Invocation.Builder current; + + Invocation.Builder invocationStarted(InjectableBean bean, Method method, Invocation.Kind kind) { + Invocation.Builder builder = this.current; + if (builder == null) { + // Entry point + builder = new Invocation.Builder(); + } else { + // Nested invocation + builder = builder.newChild(); + } + builder.setStart(System.currentTimeMillis()).setInterceptedBean(bean) + .setMethod(method).setKind(kind); + this.current = builder; + return builder; + } + + void invocationCompleted() { + Invocation.Builder current = this.current; + if (current == null) { + // Something went wrong, for example the request context was terminated unexpectedly + return; + } + if (current.getParent() != null) { + this.current = current.getParent(); + } else { + this.current = null; + } + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationsMonitor.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationsMonitor.java new file mode 100644 index 0000000000000..0a65ff3bee254 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/InvocationsMonitor.java @@ -0,0 +1,58 @@ +package io.quarkus.arc.runtime.devconsole; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import javax.inject.Singleton; + +@Singleton +public class InvocationsMonitor { + + private static final int DEFAULT_LIMIT = 1000; + + private final List invocations = Collections.synchronizedList(new ArrayList<>()); + + private volatile boolean filterOutQuarkusBeans = true; + + void addInvocation(Invocation invocation) { + if (invocations.size() > DEFAULT_LIMIT) { + // Remove some old data if the limit is exceeded + synchronized (invocations) { + if (invocations.size() > DEFAULT_LIMIT) { + invocations.subList(0, DEFAULT_LIMIT / 2).clear(); + } + } + } + invocations.add(invocation); + } + + public List getLastInvocations() { + List result = new ArrayList<>(invocations); + if (filterOutQuarkusBeans) { + for (Iterator it = result.iterator(); it.hasNext();) { + Invocation invocation = it.next(); + if (invocation.getInterceptedBean().getBeanClass().getName().startsWith("io.quarkus")) { + it.remove(); + } + } + } + Collections.reverse(result); + return result; + } + + public void clear() { + invocations.clear(); + } + + public boolean isFilterOutQuarkusBeans() { + return filterOutQuarkusBeans; + } + + public void toggleFilterOutQuarkusBeans() { + // This is not thread-safe but we don't expect concurrent actions from dev ui + filterOutQuarkusBeans = !filterOutQuarkusBeans; + } + +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/Monitored.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/Monitored.java new file mode 100644 index 0000000000000..bd8a9e1c15d24 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/devconsole/Monitored.java @@ -0,0 +1,18 @@ +package io.quarkus.arc.runtime.devconsole; + +import static java.lang.annotation.ElementType.METHOD; +import static 
java.lang.annotation.ElementType.TYPE; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import javax.interceptor.InterceptorBinding; + +@Inherited +@InterceptorBinding +@Target({ TYPE, METHOD }) +@Retention(RUNTIME) +public @interface Monitored { +} diff --git a/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/test/PreloadedTestApplicationClassPredicate.java b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/test/PreloadedTestApplicationClassPredicate.java new file mode 100644 index 0000000000000..9e47053377c35 --- /dev/null +++ b/extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/test/PreloadedTestApplicationClassPredicate.java @@ -0,0 +1,23 @@ +package io.quarkus.arc.runtime.test; + +import java.util.Set; + +import javax.inject.Singleton; + +import io.quarkus.runtime.test.TestApplicationClassPredicate; + +@Singleton +public class PreloadedTestApplicationClassPredicate implements TestApplicationClassPredicate { + + private volatile Set applicationBeanClasses; + + @Override + public boolean test(String name) { + return applicationBeanClasses.contains(name); + } + + public void setApplicationBeanClasses(Set applicationBeanClasses) { + this.applicationBeanClasses = applicationBeanClasses; + } + +} diff --git a/extensions/arc/runtime/src/main/resources/META-INF/services/io.quarkus.runtime.InterceptorBindingService$Provider b/extensions/arc/runtime/src/main/resources/META-INF/services/io.quarkus.runtime.InterceptorBindingService$Provider index ac107a38a0be5..2b70cdf694fbd 100644 --- a/extensions/arc/runtime/src/main/resources/META-INF/services/io.quarkus.runtime.InterceptorBindingService$Provider +++ b/extensions/arc/runtime/src/main/resources/META-INF/services/io.quarkus.runtime.InterceptorBindingService$Provider @@ -5,7 +5,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/extensions/artemis-core/deployment/src/main/java/io/quarkus/artemis/core/deployment/ArtemisCoreProcessor.java b/extensions/artemis-core/deployment/src/main/java/io/quarkus/artemis/core/deployment/ArtemisCoreProcessor.java index f3dd5fe2d86e4..97aba3d6076f6 100644 --- a/extensions/artemis-core/deployment/src/main/java/io/quarkus/artemis/core/deployment/ArtemisCoreProcessor.java +++ b/extensions/artemis-core/deployment/src/main/java/io/quarkus/artemis/core/deployment/ArtemisCoreProcessor.java @@ -3,6 +3,9 @@ import java.util.Collection; import java.util.Optional; +import javax.enterprise.context.ApplicationScoped; + +import org.apache.activemq.artemis.api.core.client.ServerLocator; import org.apache.activemq.artemis.api.core.client.loadbalance.ConnectionLoadBalancingPolicy; import org.apache.activemq.artemis.api.core.client.loadbalance.FirstElementConnectionLoadBalancingPolicy; import org.apache.activemq.artemis.api.core.client.loadbalance.RandomConnectionLoadBalancingPolicy; @@ -14,9 +17,7 @@ import org.jboss.jandex.DotName; import org.jboss.logging.Logger; -import io.quarkus.arc.deployment.AdditionalBeanBuildItem; -import io.quarkus.arc.deployment.BeanContainerBuildItem; -import io.quarkus.artemis.core.runtime.ArtemisCoreProducer; +import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.artemis.core.runtime.ArtemisCoreRecorder; import io.quarkus.artemis.core.runtime.ArtemisRuntimeConfig; import io.quarkus.deployment.Feature; @@ -92,25 +93,32 @@ HealthBuildItem health(ArtemisBuildTimeConfig buildConfig, Optional additionalBean, BuildProducer feature, - Optional artemisJms) { + void load(BuildProducer feature, Optional artemisJms) { if (artemisJms.isPresent()) { return; } feature.produce(new FeatureBuildItem(Feature.ARTEMIS_CORE)); - additionalBean.produce(AdditionalBeanBuildItem.unremovableOf(ArtemisCoreProducer.class)); } @Record(ExecutionTime.RUNTIME_INIT) @BuildStep ArtemisCoreConfiguredBuildItem configure(ArtemisCoreRecorder recorder, ArtemisRuntimeConfig runtimeConfig, - BeanContainerBuildItem beanContainer, Optional artemisJms) { + BuildProducer syntheticBeanProducer, Optional artemisJms) { if (artemisJms.isPresent()) { return null; } - recorder.setConfig(runtimeConfig, beanContainer.getValue()); + + SyntheticBeanBuildItem serverLocator = SyntheticBeanBuildItem.configure(ServerLocator.class) + .supplier(recorder.getServerLocatorSupplier(runtimeConfig)) + .scope(ApplicationScoped.class) + .defaultBean() + .unremovable() + .setRuntimeInit() + .done(); + syntheticBeanProducer.produce(serverLocator); + return new ArtemisCoreConfiguredBuildItem(); } } diff --git a/extensions/artemis-core/pom.xml b/extensions/artemis-core/pom.xml index 81e68f5e51c7e..81a3c3154868a 100644 --- a/extensions/artemis-core/pom.xml +++ b/extensions/artemis-core/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/artemis-core/runtime/pom.xml b/extensions/artemis-core/runtime/pom.xml index c3660a1d4241c..ccf838385bc7e 100644 --- 
a/extensions/artemis-core/runtime/pom.xml +++ b/extensions/artemis-core/runtime/pom.xml @@ -43,20 +43,6 @@ org.apache.activemq artemis-core-client - - - org.apache.geronimo.specs - geronimo-json_1.0_spec - - - org.apache.johnzon - johnzon-core - - - commons-logging - commons-logging - - @@ -67,6 +53,7 @@ org.graalvm.nativeimage svm + provided diff --git a/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreProducer.java b/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreProducer.java deleted file mode 100644 index 6ee05aeb1d237..0000000000000 --- a/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreProducer.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.artemis.core.runtime; - -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; - -import org.apache.activemq.artemis.api.core.client.ActiveMQClient; -import org.apache.activemq.artemis.api.core.client.ServerLocator; - -import io.quarkus.arc.DefaultBean; - -@ApplicationScoped -public class ArtemisCoreProducer { - - private ArtemisRuntimeConfig config; - - @Produces - @ApplicationScoped - @DefaultBean - public ServerLocator serverLocator() throws Exception { - return ActiveMQClient.createServerLocator(config.url); - } - - public ArtemisRuntimeConfig getConfig() { - return config; - } - - public void setConfig(ArtemisRuntimeConfig config) { - this.config = config; - } -} diff --git a/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreRecorder.java b/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreRecorder.java index 654e08d7c6a04..1c752cf508da2 100644 --- a/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreRecorder.java +++ b/extensions/artemis-core/runtime/src/main/java/io/quarkus/artemis/core/runtime/ArtemisCoreRecorder.java @@ -1,12 +1,25 @@ package io.quarkus.artemis.core.runtime; -import io.quarkus.arc.runtime.BeanContainer; +import java.util.function.Supplier; + +import org.apache.activemq.artemis.api.core.client.ActiveMQClient; +import org.apache.activemq.artemis.api.core.client.ServerLocator; + import io.quarkus.runtime.annotations.Recorder; @Recorder public class ArtemisCoreRecorder { - public void setConfig(ArtemisRuntimeConfig config, BeanContainer container) { - container.instance(ArtemisCoreProducer.class).setConfig(config); + public Supplier getServerLocatorSupplier(ArtemisRuntimeConfig config) { + return new Supplier() { + @Override + public ServerLocator get() { + try { + return ActiveMQClient.createServerLocator(config.url); + } catch (Exception e) { + throw new RuntimeException("Could not create ServerLocator", e); + } + } + }; } } diff --git a/extensions/artemis-jms/deployment/src/main/java/io/quarkus/artemis/jms/deployment/ArtemisJmsProcessor.java b/extensions/artemis-jms/deployment/src/main/java/io/quarkus/artemis/jms/deployment/ArtemisJmsProcessor.java index a2343bbc064d1..f8621d8fd8faa 100644 --- a/extensions/artemis-jms/deployment/src/main/java/io/quarkus/artemis/jms/deployment/ArtemisJmsProcessor.java +++ b/extensions/artemis-jms/deployment/src/main/java/io/quarkus/artemis/jms/deployment/ArtemisJmsProcessor.java @@ -1,11 +1,12 @@ package io.quarkus.artemis.jms.deployment; -import io.quarkus.arc.deployment.AdditionalBeanBuildItem; -import io.quarkus.arc.deployment.BeanContainerBuildItem; +import javax.enterprise.context.ApplicationScoped; 
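With ArtemisCoreProducer replaced by a synthetic default bean, application code can keep injecting the ServerLocator directly; a minimal sketch (the ArtemisClient class is invented for illustration):

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;

@ApplicationScoped
public class ArtemisClient {

    @Inject
    ServerLocator serverLocator; // the synthetic default bean registered at RUNTIME_INIT by ArtemisCoreProcessor

    ClientSessionFactory connect() throws Exception {
        return serverLocator.createSessionFactory();
    }
}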
+import javax.jms.ConnectionFactory; + +import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.artemis.core.deployment.ArtemisBuildTimeConfig; import io.quarkus.artemis.core.deployment.ArtemisJmsBuildItem; import io.quarkus.artemis.core.runtime.ArtemisRuntimeConfig; -import io.quarkus.artemis.jms.runtime.ArtemisJmsProducer; import io.quarkus.artemis.jms.runtime.ArtemisJmsRecorder; import io.quarkus.deployment.Feature; import io.quarkus.deployment.annotations.BuildProducer; @@ -18,12 +19,10 @@ public class ArtemisJmsProcessor { @BuildStep - void load(BuildProducer additionalBean, BuildProducer feature, - BuildProducer artemisJms) { + void load(BuildProducer feature, BuildProducer artemisJms) { artemisJms.produce(new ArtemisJmsBuildItem()); feature.produce(new FeatureBuildItem(Feature.ARTEMIS_JMS)); - additionalBean.produce(AdditionalBeanBuildItem.unremovableOf(ArtemisJmsProducer.class)); } @BuildStep @@ -36,9 +35,17 @@ HealthBuildItem health(ArtemisBuildTimeConfig buildConfig) { @Record(ExecutionTime.RUNTIME_INIT) @BuildStep ArtemisJmsConfiguredBuildItem configure(ArtemisJmsRecorder recorder, ArtemisRuntimeConfig runtimeConfig, - BeanContainerBuildItem beanContainer) { + BuildProducer syntheticBeanProducer) { + + SyntheticBeanBuildItem connectionFactory = SyntheticBeanBuildItem.configure(ConnectionFactory.class) + .supplier(recorder.getConnectionFactorySupplier(runtimeConfig)) + .scope(ApplicationScoped.class) + .defaultBean() + .unremovable() + .setRuntimeInit() + .done(); + syntheticBeanProducer.produce(connectionFactory); - recorder.setConfig(runtimeConfig, beanContainer.getValue()); return new ArtemisJmsConfiguredBuildItem(); } } diff --git a/extensions/artemis-jms/pom.xml b/extensions/artemis-jms/pom.xml index bc14e024dc9aa..4fe85d1fbcbd9 100644 --- a/extensions/artemis-jms/pom.xml +++ b/extensions/artemis-jms/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsProducer.java b/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsProducer.java deleted file mode 100644 index 7e4748c7a170b..0000000000000 --- a/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsProducer.java +++ /dev/null @@ -1,32 +0,0 @@ -package io.quarkus.artemis.jms.runtime; - -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; -import javax.jms.ConnectionFactory; - -import org.apache.activemq.artemis.jms.client.ActiveMQJMSConnectionFactory; - -import io.quarkus.arc.DefaultBean; -import io.quarkus.artemis.core.runtime.ArtemisRuntimeConfig; - -@ApplicationScoped -public class ArtemisJmsProducer { - - private ArtemisRuntimeConfig config; - - @Produces - @ApplicationScoped - @DefaultBean - public ConnectionFactory connectionFactory() { - return new ActiveMQJMSConnectionFactory(config.url, - config.username.orElse(null), config.password.orElse(null)); - } - - public ArtemisRuntimeConfig getConfig() { - return config; - } - - public void setConfig(ArtemisRuntimeConfig config) { - this.config = config; - } -} diff --git a/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsRecorder.java 
b/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsRecorder.java index fffef19ee8396..8069ced0bba59 100644 --- a/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsRecorder.java +++ b/extensions/artemis-jms/runtime/src/main/java/io/quarkus/artemis/jms/runtime/ArtemisJmsRecorder.java @@ -1,13 +1,23 @@ package io.quarkus.artemis.jms.runtime; -import io.quarkus.arc.runtime.BeanContainer; +import java.util.function.Supplier; + +import javax.jms.ConnectionFactory; + +import org.apache.activemq.artemis.jms.client.ActiveMQJMSConnectionFactory; + import io.quarkus.artemis.core.runtime.ArtemisRuntimeConfig; import io.quarkus.runtime.annotations.Recorder; @Recorder public class ArtemisJmsRecorder { - public void setConfig(ArtemisRuntimeConfig config, BeanContainer container) { - container.instance(ArtemisJmsProducer.class).setConfig(config); + public Supplier getConnectionFactorySupplier(ArtemisRuntimeConfig config) { + return new Supplier() { + @Override + public ConnectionFactory get() { + return new ActiveMQJMSConnectionFactory(config.url, config.username.orElse(null), config.password.orElse(null)); + } + }; } } diff --git a/extensions/avro/deployment/src/main/java/io/quarkus/avro/deployment/AvroProcessor.java b/extensions/avro/deployment/src/main/java/io/quarkus/avro/deployment/AvroProcessor.java index 1cf6a40f8ed89..d1f96c6f6b280 100644 --- a/extensions/avro/deployment/src/main/java/io/quarkus/avro/deployment/AvroProcessor.java +++ b/extensions/avro/deployment/src/main/java/io/quarkus/avro/deployment/AvroProcessor.java @@ -2,7 +2,6 @@ import java.util.Collection; -import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.specific.AvroGenerated; import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationTarget; @@ -24,14 +23,12 @@ public void build(CombinedIndexBuildItem indexBuildItem, BuildProducer conf) { NativeImageConfigBuildItem.Builder builder = NativeImageConfigBuildItem.builder(); - builder.addRuntimeInitializedClass(GenericDatumReader.class.getName()); Collection annotations = indexBuildItem.getIndex() .getAnnotations(DotName.createSimple(AvroGenerated.class.getName())); for (AnnotationInstance annotation : annotations) { if (annotation.target().kind() == AnnotationTarget.Kind.CLASS) { String className = annotation.target().asClass().name().toString(); - builder.addRuntimeInitializedClass(className); reflectiveClass.produce(new ReflectiveClassBuildItem(true, true, true, className)); } } diff --git a/extensions/avro/pom.xml b/extensions/avro/pom.xml index 0712b2d3ba836..5cfa0fa587bcf 100644 --- a/extensions/avro/pom.xml +++ b/extensions/avro/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/avro/runtime/pom.xml b/extensions/avro/runtime/pom.xml index b44c445472a23..ff58f1febfdb6 100644 --- a/extensions/avro/runtime/pom.xml +++ b/extensions/avro/runtime/pom.xml @@ -27,6 +27,7 @@ org.graalvm.nativeimage svm + provided diff --git a/extensions/avro/runtime/src/main/java/io/quarkus/avro/graal/AvroSubstitutions.java b/extensions/avro/runtime/src/main/java/io/quarkus/avro/graal/AvroSubstitutions.java index 6d480fef3d507..009db1593e49e 100644 --- 
a/extensions/avro/runtime/src/main/java/io/quarkus/avro/graal/AvroSubstitutions.java +++ b/extensions/avro/runtime/src/main/java/io/quarkus/avro/graal/AvroSubstitutions.java @@ -2,10 +2,16 @@ import java.lang.reflect.Constructor; import java.util.HashMap; +import java.util.IdentityHashMap; import java.util.Map; import java.util.function.Function; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; import org.apache.avro.generic.IndexedRecord; +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.ResolvingDecoder; +import org.apache.avro.util.WeakIdentityHashMap; import com.oracle.svm.core.annotate.Alias; import com.oracle.svm.core.annotate.Inject; @@ -79,5 +85,45 @@ public Target_org_apache_avro_reflect_ReflectData_ClassAccessorData(Class c) } +@TargetClass(className = "org.apache.avro.generic.GenericDatumReader") +final class Target_org_apache_avro_generic_GenericDatumReader { + + @Alias + private GenericData data; + @Alias + private Schema actual; + @Alias + private Schema expected; + @Alias + private DatumReader fastDatumReader; + @Alias + private ResolvingDecoder creatorResolver; + @Alias + @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.Reset) + private Thread creator; + + @Alias + @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.FromAlias) + private static ThreadLocal>> RESOLVER_CACHE = ThreadLocal.withInitial( + WeakIdentityHashMap::new); + @Alias + @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.Reset) + private Map stringClassCache; + @Alias + @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.Reset) + private Map stringCtorCache; + + @Substitute + protected Target_org_apache_avro_generic_GenericDatumReader(GenericData data) { + this.fastDatumReader = null; + this.creatorResolver = null; + this.stringClassCache = new IdentityHashMap(); + this.stringCtorCache = new HashMap(); + this.data = data; + this.creator = Thread.currentThread(); + } + +} + class AvroSubstitutions { } diff --git a/extensions/azure-functions-http/deployment/pom.xml b/extensions/azure-functions-http/deployment/pom.xml index 307de958f33e9..873720ccb3ae7 100644 --- a/extensions/azure-functions-http/deployment/pom.xml +++ b/extensions/azure-functions-http/deployment/pom.xml @@ -6,7 +6,6 @@ quarkus-azure-functions-http-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 @@ -29,6 +28,7 @@ org.graalvm.nativeimage svm + provided diff --git a/extensions/azure-functions-http/deployment/src/main/java/io/quarkus/azure/functions/resteasy/deployment/AzureFunctionsHttpProcessor.java b/extensions/azure-functions-http/deployment/src/main/java/io/quarkus/azure/functions/resteasy/deployment/AzureFunctionsHttpProcessor.java index e64cfef50b761..aa2f04dbdf1df 100644 --- a/extensions/azure-functions-http/deployment/src/main/java/io/quarkus/azure/functions/resteasy/deployment/AzureFunctionsHttpProcessor.java +++ b/extensions/azure-functions-http/deployment/src/main/java/io/quarkus/azure/functions/resteasy/deployment/AzureFunctionsHttpProcessor.java @@ -4,12 +4,19 @@ import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.pkg.builditem.UberJarRequiredBuildItem; import io.quarkus.runtime.LaunchMode; import io.quarkus.vertx.http.deployment.RequireVirtualHttpBuildItem; public class AzureFunctionsHttpProcessor { private static final Logger log = Logger.getLogger(AzureFunctionsHttpProcessor.class); + @BuildStep + public UberJarRequiredBuildItem forceUberJar() { + // Azure Functions needs a single JAR inside a 
dedicated directory + return new UberJarRequiredBuildItem(); + } + @BuildStep public RequireVirtualHttpBuildItem requestVirtualHttp(LaunchModeBuildItem launchMode) { return launchMode.getLaunchMode() == LaunchMode.NORMAL ? RequireVirtualHttpBuildItem.MARKER : null; diff --git a/extensions/azure-functions-http/maven-archetype/pom.xml b/extensions/azure-functions-http/maven-archetype/pom.xml index 363f2f2b28ea0..c41613ce774f0 100644 --- a/extensions/azure-functions-http/maven-archetype/pom.xml +++ b/extensions/azure-functions-http/maven-archetype/pom.xml @@ -6,7 +6,6 @@ quarkus-azure-functions-http-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 diff --git a/extensions/azure-functions-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml b/extensions/azure-functions-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml index ca92bc8d96dc3..3fc29d816748a 100644 --- a/extensions/azure-functions-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml +++ b/extensions/azure-functions-http/maven-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml @@ -9,7 +9,7 @@ to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an diff --git a/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/azure-config/function.json b/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/azure-config/function.json index 86d9f31b56892..416c0c0274e11 100644 --- a/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/azure-config/function.json +++ b/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/azure-config/function.json @@ -1,5 +1,5 @@ { - "scriptFile" : "../${project.build.finalName}-runner.jar", + "scriptFile" : "../${project.build.finalName}.jar", "entryPoint" : "io.quarkus.azure.functions.resteasy.runtime.Function.run", "bindings" : [ { "type" : "httpTrigger", diff --git a/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/pom.xml b/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/pom.xml index 254fd89bb7cf8..24d48af000f54 100644 --- a/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/pom.xml +++ b/extensions/azure-functions-http/maven-archetype/src/main/resources/archetype-resources/pom.xml @@ -8,8 +8,8 @@ 3.8.1 true - 1.8 - 1.8 + 11 + 11 UTF-8 UTF-8 999-SNAPSHOT @@ -19,12 +19,13 @@ 3.1.0 3.0.0-M5 - 1.3.2 + 1.9.1 ${appName} ${appRegion} ${resourceGroup} ${function} ${project.build.directory}/azure-functions/${functionAppName} + uber-jar @@ -79,9 +80,6 @@ io.quarkus quarkus-maven-plugin ${quarkus-plugin.version} - - true - @@ -114,19 +112,19 @@ ${functionResourceGroup} ${functionAppName} ${functionAppRegion} + + + windows + 11 + + + + + - - - WEBSITE_RUN_FROM_PACKAGE - 1 - FUNCTIONS_EXTENSION_VERSION - ~2 - - - FUNCTIONS_WORKER_RUNTIME - java + ~3 @@ -179,24 +177,23 @@ - + + + + + com.coderplus.maven.plugins + copy-rename-maven-plugin + 1.0.1 + - copy-uberjar - install + rename-file + package - copy-resources + copy - true - ${stagingDirectory} - - - ${project.build.directory} - 
- ${project.artifactId}-${project.version}-runner.jar - - - + ${project.build.directory}/${project.artifactId}-${project.version}-runner.jar + ${stagingDirectory}/${project.artifactId}-${project.version}.jar diff --git a/extensions/azure-functions-http/pom.xml b/extensions/azure-functions-http/pom.xml index e9c4267f9f1cb..b68a683b1ab58 100644 --- a/extensions/azure-functions-http/pom.xml +++ b/extensions/azure-functions-http/pom.xml @@ -6,7 +6,7 @@ ~ you may not use this file except in compliance with the License. ~ You may obtain a copy of the License at ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ https://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, @@ -19,10 +19,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/azure-functions-http/runtime/pom.xml b/extensions/azure-functions-http/runtime/pom.xml index 5a2cf1ce6b60a..d77049dd8bcca 100644 --- a/extensions/azure-functions-http/runtime/pom.xml +++ b/extensions/azure-functions-http/runtime/pom.xml @@ -6,7 +6,6 @@ quarkus-azure-functions-http-parent io.quarkus 999-SNAPSHOT - ../ 4.0.0 @@ -26,6 +25,7 @@ org.graalvm.nativeimage svm + provided com.microsoft.azure.functions diff --git a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/BaseFunction.java b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/BaseFunction.java index 6f21cd88ce7cb..1f3f55629f98f 100644 --- a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/BaseFunction.java +++ b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/BaseFunction.java @@ -34,10 +34,13 @@ public class BaseFunction { private static final Logger log = Logger.getLogger("io.quarkus.azure"); - protected static final String deploymentStatus; + protected static String deploymentStatus; protected static boolean started = false; + protected static boolean bootstrapError = false; - static { + private static final int BUFFER_SIZE = 8096; + + protected static void initQuarkus() { StringWriter error = new StringWriter(); PrintWriter errorWriter = new PrintWriter(error, true); if (Application.currentApplication() == null) { // were we already bootstrapped? Needed for mock azure unit testing. 
@@ -48,7 +51,8 @@ public class BaseFunction { app.start(args); errorWriter.println("Quarkus bootstrapped successfully."); started = true; - } catch (Exception ex) { + } catch (Throwable ex) { + bootstrapError = true; errorWriter.println("Quarkus bootstrap failed."); ex.printStackTrace(errorWriter); } @@ -59,7 +63,7 @@ public class BaseFunction { deploymentStatus = error.toString(); } - protected HttpResponseMessage dispatch(HttpRequestMessage> request) { + protected HttpResponseMessage dispatch(HttpRequestMessage> request) { try { return nettyDispatch(request); } catch (Exception e) { @@ -69,7 +73,7 @@ protected HttpResponseMessage dispatch(HttpRequestMessage> requ } } - protected HttpResponseMessage nettyDispatch(HttpRequestMessage> request) + protected HttpResponseMessage nettyDispatch(HttpRequestMessage> request) throws Exception { String path = request.getUri().getRawPath(); String query = request.getUri().getRawQuery(); @@ -88,7 +92,7 @@ protected HttpResponseMessage nettyDispatch(HttpRequestMessage> HttpContent requestContent = LastHttpContent.EMPTY_LAST_CONTENT; if (request.getBody().isPresent()) { - ByteBuf body = Unpooled.wrappedBuffer(request.getBody().get()); + ByteBuf body = Unpooled.wrappedBuffer(request.getBody().get().getBytes()); requestContent = new DefaultLastHttpContent(body); } @@ -106,7 +110,7 @@ protected HttpResponseMessage nettyDispatch(HttpRequestMessage> private static ByteArrayOutputStream createByteStream() { ByteArrayOutputStream baos; - baos = new ByteArrayOutputStream(500); + baos = new ByteArrayOutputStream(BUFFER_SIZE); return baos; } @@ -115,9 +119,9 @@ private static class ResponseHandler implements VirtualResponseHandler { ByteArrayOutputStream baos; WritableByteChannel byteChannel; CompletableFuture future = new CompletableFuture<>(); - final HttpRequestMessage> request; + final HttpRequestMessage> request; - public ResponseHandler(HttpRequestMessage> request) { + public ResponseHandler(HttpRequestMessage> request) { this.request = request; } @@ -136,7 +140,6 @@ public void handleMessage(Object msg) { if (msg instanceof HttpContent) { HttpContent content = (HttpContent) msg; if (baos == null) { - // todo what is right size? 
baos = createByteStream(); } int readable = content.content().readableBytes(); diff --git a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java index 9c178b7d53b2c..94a73ac9f9c85 100644 --- a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java +++ b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java @@ -12,9 +12,12 @@ public class Function extends BaseFunction { public HttpResponseMessage run( - @HttpTrigger(name = "req") HttpRequestMessage> request, + @HttpTrigger(name = "req") HttpRequestMessage> request, final ExecutionContext context) { - if (!started) { + if (!started && !bootstrapError) { + initQuarkus(); + } + if (bootstrapError) { HttpResponseMessage.Builder responseBuilder = request .createResponseBuilder(HttpStatus.valueOf(500)).body( deploymentStatus.getBytes(StandardCharsets.UTF_8)); diff --git a/extensions/azure-functions-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/azure-functions-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 7bb126a9fd9e0..628632236297d 100644 --- a/extensions/azure-functions-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/azure-functions-http/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -8,3 +8,8 @@ metadata: - "cloud" guide: "https://quarkus.io/guides/azure-functions-http" status: "preview" + codestart: + name: "azure-functions-http" + kind: "singleton-example" + languages: "java" + artifact: "io.quarkus:quarkus-descriptor-json" diff --git a/extensions/cache/deployment/pom.xml b/extensions/cache/deployment/pom.xml index 8188d8178cb88..7f5f0197b7714 100644 --- a/extensions/cache/deployment/pom.xml +++ b/extensions/cache/deployment/pom.xml @@ -8,7 +8,6 @@ quarkus-cache-parent io.quarkus 999-SNAPSHOT - ../ quarkus-cache-deployment diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheAnnotationsTransformer.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheAnnotationsTransformer.java index 5e28635699d07..da9123645f01d 100644 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheAnnotationsTransformer.java +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheAnnotationsTransformer.java @@ -1,109 +1,52 @@ package io.quarkus.cache.deployment; import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_INVALIDATE; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_INVALIDATE_ALL; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_INVALIDATE_ALL_LIST; import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_INVALIDATE_LIST; import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_KEY; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_KEY_PARAMETER_POSITIONS_PARAM; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_NAME_PARAM; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_KEY_PARAMETER_POSITIONS; import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_RESULT; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.LOCK_TIMEOUT_PARAM; +import static 
org.jboss.jandex.AnnotationInstance.create; +import static org.jboss.jandex.AnnotationTarget.Kind.METHOD; +import static org.jboss.jandex.AnnotationValue.createArrayValue; +import static org.jboss.jandex.AnnotationValue.createShortValue; import java.util.ArrayList; import java.util.List; -import java.util.Optional; import org.jboss.jandex.AnnotationInstance; -import org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.AnnotationTarget.Kind; import org.jboss.jandex.AnnotationValue; -import org.jboss.jandex.DotName; import org.jboss.jandex.MethodInfo; import io.quarkus.arc.processor.AnnotationsTransformer; -import io.quarkus.cache.runtime.CacheInvalidateAllInterceptorBinding; -import io.quarkus.cache.runtime.CacheInvalidateInterceptorBinding; -import io.quarkus.cache.runtime.CacheResultInterceptorBinding; public class CacheAnnotationsTransformer implements AnnotationsTransformer { @Override public boolean appliesTo(Kind kind) { - return Kind.METHOD == kind; + return kind == METHOD; } @Override public void transform(TransformationContext context) { MethodInfo method = context.getTarget().asMethod(); - List interceptorBindings = new ArrayList<>(); - for (AnnotationInstance annotation : method.annotations()) { - AnnotationTarget target = annotation.target(); - if (target.kind() == Kind.METHOD) { - if (CACHE_INVALIDATE_ALL.equals(annotation.name())) { - interceptorBindings.add(createCacheInvalidateAllBinding(annotation, target)); - } else if (CACHE_INVALIDATE_ALL_LIST.equals(annotation.name())) { - for (AnnotationInstance nestedAnnotation : annotation.value("value").asNestedArray()) { - interceptorBindings.add(createCacheInvalidateAllBinding(nestedAnnotation, target)); - } - } else if (CACHE_INVALIDATE.equals(annotation.name())) { - interceptorBindings.add(createCacheInvalidateBinding(method, annotation, target)); - } else if (CACHE_INVALIDATE_LIST.equals(annotation.name())) { - for (AnnotationInstance nestedAnnotation : annotation.value("value").asNestedArray()) { - interceptorBindings.add(createCacheInvalidateBinding(method, nestedAnnotation, target)); - } - } else if (CACHE_RESULT.equals(annotation.name())) { - interceptorBindings.add(createCacheResultBinding(method, annotation, target)); - } + if (requiresCacheKeyParameterPositionsInterceptorBinding(method)) { + List positions = new ArrayList<>(); + for (AnnotationInstance annotation : method.annotations(CACHE_KEY)) { + positions.add(createShortValue("", annotation.target().asMethodParameter().position())); } - } - context.transform().addAll(interceptorBindings).done(); - } - - private AnnotationInstance createCacheInvalidateAllBinding(AnnotationInstance annotation, AnnotationTarget target) { - return createBinding(CacheInvalidateAllInterceptorBinding.class, target, getCacheName(annotation)); - } - - private AnnotationInstance createCacheInvalidateBinding(MethodInfo method, AnnotationInstance annotation, - AnnotationTarget target) { - List parameters = new ArrayList<>(); - parameters.add(getCacheName(annotation)); - findCacheKeyParameters(method).ifPresent(parameters::add); - return createBinding(CacheInvalidateInterceptorBinding.class, target, toArray(parameters)); - } - - private AnnotationInstance createCacheResultBinding(MethodInfo method, AnnotationInstance annotation, - AnnotationTarget target) { - List parameters = new ArrayList<>(); - parameters.add(getCacheName(annotation)); - findCacheKeyParameters(method).ifPresent(parameters::add); - findLockTimeout(annotation).ifPresent(parameters::add); - return 
createBinding(CacheResultInterceptorBinding.class, target, toArray(parameters)); - } - - private AnnotationInstance createBinding(Class bindingClass, AnnotationTarget target, AnnotationValue... values) { - return AnnotationInstance.create(DotName.createSimple(bindingClass.getName()), target, values); - } - - private AnnotationValue getCacheName(AnnotationInstance annotation) { - return annotation.value(CACHE_NAME_PARAM); - } - - private Optional findCacheKeyParameters(MethodInfo method) { - List parameters = new ArrayList<>(); - for (AnnotationInstance annotation : method.annotations()) { - if (annotation.target().kind() == Kind.METHOD_PARAMETER && CACHE_KEY.equals(annotation.name())) { - parameters.add(AnnotationValue.createShortValue("", annotation.target().asMethodParameter().position())); + if (!positions.isEmpty()) { + AnnotationValue annotationValue = createArrayValue("value", toArray(positions)); + AnnotationInstance binding = create(CACHE_KEY_PARAMETER_POSITIONS, method, + new AnnotationValue[] { annotationValue }); + context.transform().add(binding).done(); } } - if (parameters.isEmpty()) { - return Optional.empty(); - } - return Optional.of(AnnotationValue.createArrayValue(CACHE_KEY_PARAMETER_POSITIONS_PARAM, toArray(parameters))); } - private Optional findLockTimeout(AnnotationInstance annotation) { - return Optional.ofNullable(annotation.value(LOCK_TIMEOUT_PARAM)); + private boolean requiresCacheKeyParameterPositionsInterceptorBinding(MethodInfo method) { + return method.hasAnnotation(CACHE_KEY) && (method.hasAnnotation(CACHE_INVALIDATE) + || method.hasAnnotation(CACHE_INVALIDATE_LIST) || method.hasAnnotation(CACHE_RESULT)); } private AnnotationValue[] toArray(List parameters) { diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheConfig.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheConfig.java index 9a23c59c6a80e..fa87ec3073a9d 100644 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheConfig.java +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheConfig.java @@ -14,6 +14,12 @@ @ConfigRoot public class CacheConfig { + /** + * Whether or not the cache extension is enabled. + */ + @ConfigItem(defaultValue = "true") + public boolean enabled; + /** * Cache type. */ diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheDeploymentConstants.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheDeploymentConstants.java index 0116dadc22830..3cc4b256e6c94 100644 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheDeploymentConstants.java +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheDeploymentConstants.java @@ -8,26 +8,37 @@ import io.quarkus.cache.CacheInvalidate; import io.quarkus.cache.CacheInvalidateAll; import io.quarkus.cache.CacheKey; +import io.quarkus.cache.CacheName; import io.quarkus.cache.CacheResult; +import io.quarkus.cache.runtime.CacheInvalidateAllInterceptor; +import io.quarkus.cache.runtime.CacheInvalidateInterceptor; +import io.quarkus.cache.runtime.CacheKeyParameterPositions; +import io.quarkus.cache.runtime.CacheResultInterceptor; public class CacheDeploymentConstants { // API annotations names. 
+ public static final DotName CACHE_NAME = dotName(CacheName.class); public static final DotName CACHE_INVALIDATE_ALL = dotName(CacheInvalidateAll.class); public static final DotName CACHE_INVALIDATE_ALL_LIST = dotName(CacheInvalidateAll.List.class); public static final DotName CACHE_INVALIDATE = dotName(CacheInvalidate.class); public static final DotName CACHE_INVALIDATE_LIST = dotName(CacheInvalidate.List.class); public static final DotName CACHE_RESULT = dotName(CacheResult.class); public static final DotName CACHE_KEY = dotName(CacheKey.class); - public static final List API_METHODS_ANNOTATIONS = Arrays.asList( - CACHE_RESULT, CACHE_INVALIDATE, CACHE_INVALIDATE_ALL); - public static final List API_METHODS_ANNOTATIONS_LISTS = Arrays.asList( - CACHE_INVALIDATE_LIST, CACHE_INVALIDATE_ALL_LIST); + public static final List INTERCEPTOR_BINDINGS = Arrays.asList(CACHE_RESULT, CACHE_INVALIDATE, + CACHE_INVALIDATE_ALL); + public static final List INTERCEPTOR_BINDING_CONTAINERS = Arrays.asList(CACHE_INVALIDATE_LIST, + CACHE_INVALIDATE_ALL_LIST); + public static final List INTERCEPTORS = Arrays.asList(dotName(CacheInvalidateAllInterceptor.class), + dotName(CacheInvalidateInterceptor.class), dotName(CacheResultInterceptor.class)); + public static final DotName CACHE_KEY_PARAMETER_POSITIONS = dotName(CacheKeyParameterPositions.class); + + // MicroProfile REST Client. + public static final DotName REGISTER_REST_CLIENT = DotName + .createSimple("org.eclipse.microprofile.rest.client.inject.RegisterRestClient"); // Annotations parameters. public static final String CACHE_NAME_PARAM = "cacheName"; - public static final String CACHE_KEY_PARAMETER_POSITIONS_PARAM = "cacheKeyParameterPositions"; - public static final String LOCK_TIMEOUT_PARAM = "lockTimeout"; // Caffeine. 
public static final String CAFFEINE_CACHE_TYPE = "caffeine"; diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheMethodValidator.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheMethodValidator.java deleted file mode 100644 index ebcc057f0c573..0000000000000 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheMethodValidator.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.quarkus.cache.deployment; - -import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_RESULT; - -import java.util.List; - -import org.jboss.jandex.AnnotationInstance; -import org.jboss.jandex.MethodInfo; -import org.jboss.jandex.Type.Kind; - -import io.quarkus.arc.processor.AnnotationStore; -import io.quarkus.arc.processor.BeanInfo; -import io.quarkus.cache.deployment.exception.IllegalReturnTypeException; - -public class CacheMethodValidator { - - public static void validateAnnotations(AnnotationStore annotationStore, BeanInfo bean, MethodInfo method, - List throwables) { - - AnnotationInstance cacheResult = annotationStore.getAnnotation(method, CACHE_RESULT); - if (cacheResult != null && method.returnType().kind() == Kind.VOID) { - String exceptionMessage = "The @CacheResult annotation is not allowed on a method returning void: [class= " - + bean.getBeanClass() + ", method= " + method + "]"; - throwables.add(new IllegalReturnTypeException(exceptionMessage)); - } - } -} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheNamesBuildItem.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheNamesBuildItem.java new file mode 100644 index 0000000000000..2d3c6807e6f70 --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheNamesBuildItem.java @@ -0,0 +1,21 @@ +package io.quarkus.cache.deployment; + +import java.util.Set; + +import io.quarkus.builder.item.SimpleBuildItem; + +/** + * This build item is used to pass the full list of cache names from the validation step to the recording step. 
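+ * The names are collected from the caching annotations found in the application and from the
+ * {@code AdditionalCacheNameBuildItem} instances produced by other extensions.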
+ */ +public final class CacheNamesBuildItem extends SimpleBuildItem { + + private final Set names; + + public CacheNamesBuildItem(Set names) { + this.names = names; + } + + public Set getNames() { + return names; + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheProcessor.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheProcessor.java index 4358b4d236b60..8f562a9fc0567 100644 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheProcessor.java +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CacheProcessor.java @@ -1,40 +1,53 @@ package io.quarkus.cache.deployment; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.API_METHODS_ANNOTATIONS; -import static io.quarkus.cache.deployment.CacheDeploymentConstants.API_METHODS_ANNOTATIONS_LISTS; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_INVALIDATE; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_INVALIDATE_LIST; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_KEY; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_NAME; import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_NAME_PARAM; -import static io.quarkus.deployment.annotations.ExecutionTime.RUNTIME_INIT; -import static org.jboss.jandex.AnnotationTarget.Kind.METHOD; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.CACHE_RESULT; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.INTERCEPTORS; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.INTERCEPTOR_BINDINGS; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.INTERCEPTOR_BINDING_CONTAINERS; +import static io.quarkus.cache.deployment.CacheDeploymentConstants.REGISTER_REST_CLIENT; +import static io.quarkus.deployment.annotations.ExecutionTime.STATIC_INIT; +import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.function.Supplier; +import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.spi.DeploymentException; import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.AnnotationTarget.Kind; +import org.jboss.jandex.ClassInfo; import org.jboss.jandex.DotName; -import org.jboss.jandex.IndexView; import org.jboss.jandex.MethodInfo; +import org.jboss.jandex.Type; -import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import io.quarkus.arc.deployment.AnnotationsTransformerBuildItem; -import io.quarkus.arc.deployment.BeanContainerBuildItem; -import io.quarkus.arc.deployment.ValidationPhaseBuildItem; +import io.quarkus.arc.deployment.AutoInjectAnnotationBuildItem; +import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.arc.deployment.ValidationPhaseBuildItem.ValidationErrorBuildItem; -import io.quarkus.arc.processor.AnnotationStore; -import io.quarkus.arc.processor.BeanInfo; -import io.quarkus.arc.processor.BuildExtension.Key; -import io.quarkus.cache.runtime.CacheInvalidateAllInterceptor; -import io.quarkus.cache.runtime.CacheInvalidateInterceptor; -import io.quarkus.cache.runtime.CacheResultInterceptor; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.deployment.exception.ClassTargetException; +import io.quarkus.cache.deployment.exception.PrivateMethodTargetException; 
+import io.quarkus.cache.deployment.exception.UnknownCacheNameException; +import io.quarkus.cache.deployment.exception.UnsupportedRepeatedAnnotationException; +import io.quarkus.cache.deployment.exception.VoidReturnTypeTargetException; import io.quarkus.cache.runtime.caffeine.CaffeineCacheBuildRecorder; import io.quarkus.cache.runtime.caffeine.CaffeineCacheInfo; +import io.quarkus.cache.runtime.noop.NoOpCacheBuildRecorder; import io.quarkus.deployment.Feature; +import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.Record; +import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.FeatureBuildItem; @@ -46,71 +59,194 @@ FeatureBuildItem feature() { } @BuildStep - AnnotationsTransformerBuildItem annotationsTransformer() { - return new AnnotationsTransformerBuildItem(new CacheAnnotationsTransformer()); + AutoInjectAnnotationBuildItem autoInjectCacheName() { + return new AutoInjectAnnotationBuildItem(CACHE_NAME); } @BuildStep - List additionalBeans() { - return Arrays.asList( - new AdditionalBeanBuildItem(CacheInvalidateAllInterceptor.class), - new AdditionalBeanBuildItem(CacheInvalidateInterceptor.class), - new AdditionalBeanBuildItem(CacheResultInterceptor.class)); + AnnotationsTransformerBuildItem annotationsTransformer() { + return new AnnotationsTransformerBuildItem(new CacheAnnotationsTransformer()); } @BuildStep - ValidationErrorBuildItem validateBeanDeployment(ValidationPhaseBuildItem validationPhase) { - AnnotationStore annotationStore = validationPhase.getContext().get(Key.ANNOTATION_STORE); + void validateCacheAnnotationsAndProduceCacheNames(CombinedIndexBuildItem combinedIndex, + List additionalCacheNames, BuildProducer validationErrors, + BuildProducer cacheNames) { + + // Validation errors produced by this build step. List throwables = new ArrayList<>(); - for (BeanInfo bean : validationPhase.getContext().get(Key.BEANS)) { - if (bean.isClassBean()) { - for (MethodInfo method : bean.getTarget().get().asClass().methods()) { - if (annotationStore.hasAnyAnnotation(method, API_METHODS_ANNOTATIONS)) { - CacheMethodValidator.validateAnnotations(annotationStore, bean, method, throwables); + // Cache names produced by this build step. + Set names = new HashSet<>(); + + /* + * First, for each non-repeated cache interceptor binding: + * - its target is validated + * - the corresponding cache name is collected + */ + for (DotName bindingName : INTERCEPTOR_BINDINGS) { + for (AnnotationInstance binding : combinedIndex.getIndex().getAnnotations(bindingName)) { + throwables.addAll(validateInterceptorBindingTarget(binding, binding.target())); + if (binding.target().kind() == Kind.METHOD) { + /* + * Cache names from the interceptor bindings placed on cache interceptors must not be collected to prevent + * the instantiation of a cache with an empty name. + */ + names.add(binding.value(CACHE_NAME_PARAM).asString()); + } + } + } + + // The exact same things need to be done for repeated cache interceptor bindings. 
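+        // For example, a method annotated twice with @CacheInvalidate (using different cache names) is indexed through
+        // the @CacheInvalidate.List container annotation, so the nested bindings have to be unwrapped here.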
+ for (DotName containerName : INTERCEPTOR_BINDING_CONTAINERS) { + for (AnnotationInstance container : combinedIndex.getIndex().getAnnotations(containerName)) { + for (AnnotationInstance binding : container.value("value").asNestedArray()) { + throwables.addAll(validateInterceptorBindingTarget(binding, container.target())); + names.add(binding.value(CACHE_NAME_PARAM).asString()); + } + /* + * Interception from repeated interceptor bindings won't work with the CDI implementation from MicroProfile REST + * Client. Using repeated interceptor bindings on a method from a class annotated with @RegisterRestClient must + * therefore be forbidden. + */ + if (container.target().kind() == Kind.METHOD) { + MethodInfo methodInfo = container.target().asMethod(); + if (methodInfo.declaringClass().classAnnotation(REGISTER_REST_CLIENT) != null) { + throwables.add(new UnsupportedRepeatedAnnotationException(methodInfo)); } } } } - return new ValidationErrorBuildItem(throwables.toArray(new Throwable[0])); - } - @BuildStep - @Record(RUNTIME_INIT) - void recordCachesBuild(CombinedIndexBuildItem combinedIndex, BeanContainerBuildItem beanContainer, CacheConfig config, - CaffeineCacheBuildRecorder caffeineRecorder, - List additionalCacheNames) { - Set cacheNames = getCacheNames(combinedIndex.getIndex()); + /* + * Before @CacheName can be validated, additional cache names provided by other extensions must be added to the cache + * names collection built above. + */ for (AdditionalCacheNameBuildItem additionalCacheName : additionalCacheNames) { - cacheNames.add(additionalCacheName.getName()); + names.add(additionalCacheName.getName()); } - switch (config.type) { - case CacheDeploymentConstants.CAFFEINE_CACHE_TYPE: - Set cacheInfos = CaffeineCacheInfoBuilder.build(cacheNames, config); - caffeineRecorder.buildCaches(beanContainer.getValue(), cacheInfos); + + // @CacheName can now be validated. + for (AnnotationInstance qualifier : combinedIndex.getIndex().getAnnotations(CACHE_NAME)) { + String cacheName = qualifier.value().asString(); + AnnotationTarget target = qualifier.target(); + switch (target.kind()) { + case FIELD: + if (!names.contains(cacheName)) { + ClassInfo declaringClass = target.asField().declaringClass(); + throwables.add(new UnknownCacheNameException(declaringClass.name(), cacheName)); + } + break; + case METHOD: + /* + * This should only happen in CacheProducer. It'd be nice if we could forbid using @CacheName in any other + * class, but Arc throws an AmbiguousResolutionException before we get a chance to validate things here. + */ + break; + case METHOD_PARAMETER: + if (!names.contains(cacheName)) { + ClassInfo declaringClass = target.asMethodParameter().method().declaringClass(); + throwables.add(new UnknownCacheNameException(declaringClass.name(), cacheName)); + } + break; + default: + // This should never be thrown. 
+ throw new DeploymentException("Unexpected @CacheName target: " + target.kind()); + } + } + + validationErrors.produce(new ValidationErrorBuildItem(throwables.toArray(new Throwable[0]))); + cacheNames.produce(new CacheNamesBuildItem(names)); + } + + private List validateInterceptorBindingTarget(AnnotationInstance binding, AnnotationTarget target) { + List throwables = new ArrayList<>(); + switch (target.kind()) { + case CLASS: + ClassInfo classInfo = target.asClass(); + if (!INTERCEPTORS.contains(classInfo.name())) { + throwables.add(new ClassTargetException(classInfo.name(), binding.name())); + } + break; + case METHOD: + MethodInfo methodInfo = target.asMethod(); + if (Modifier.isPrivate(methodInfo.flags())) { + throwables.add(new PrivateMethodTargetException(methodInfo, binding.name())); + } + if (CACHE_RESULT.equals(binding.name()) && methodInfo.returnType().kind() == Type.Kind.VOID) { + throwables.add(new VoidReturnTypeTargetException(methodInfo)); + } break; default: - throw new DeploymentException("Unknown cache type: " + config.type); + // This should never be thrown. + throw new DeploymentException("Unexpected cache interceptor binding target: " + target.kind()); } + return throwables; } - private Set getCacheNames(IndexView index) { - Set cacheNames = new HashSet<>(); - for (DotName cacheAnnotation : API_METHODS_ANNOTATIONS) { - for (AnnotationInstance annotation : index.getAnnotations(cacheAnnotation)) { - if (annotation.target().kind() == METHOD) { - cacheNames.add(annotation.value(CACHE_NAME_PARAM).asString()); - } + @BuildStep + @Record(STATIC_INIT) + SyntheticBeanBuildItem configureCacheManagerSyntheticBean(CacheNamesBuildItem cacheNames, CacheConfig config, + CaffeineCacheBuildRecorder caffeineRecorder, NoOpCacheBuildRecorder noOpRecorder) { + + Supplier cacheManagerSupplier; + if (config.enabled) { + switch (config.type) { + case CacheDeploymentConstants.CAFFEINE_CACHE_TYPE: + Set cacheInfos = CaffeineCacheInfoBuilder.build(cacheNames.getNames(), config); + cacheManagerSupplier = caffeineRecorder.getCacheManagerSupplier(cacheInfos); + break; + default: + throw new DeploymentException("Unknown cache type: " + config.type); } + } else { + cacheManagerSupplier = noOpRecorder.getCacheManagerSupplier(cacheNames.getNames()); } - for (DotName list : API_METHODS_ANNOTATIONS_LISTS) { - for (AnnotationInstance annotation : index.getAnnotations(list)) { - if (annotation.target().kind() == METHOD) { - for (AnnotationInstance nestedAnnotation : annotation.value("value").asNestedArray()) { - cacheNames.add(nestedAnnotation.value(CACHE_NAME_PARAM).asString()); + + return SyntheticBeanBuildItem.configure(CacheManager.class) + .scope(ApplicationScoped.class) + .supplier(cacheManagerSupplier) + .done(); + } + + @BuildStep + List enhanceRestClientMethods(CombinedIndexBuildItem combinedIndex) { + List bytecodeTransformers = new ArrayList<>(); + for (AnnotationInstance registerRestClientAnnotation : combinedIndex.getIndex().getAnnotations(REGISTER_REST_CLIENT)) { + if (registerRestClientAnnotation.target().kind() == Kind.CLASS) { + ClassInfo classInfo = registerRestClientAnnotation.target().asClass(); + for (MethodInfo methodInfo : classInfo.methods()) { + if (methodInfo.hasAnnotation(CACHE_INVALIDATE) || methodInfo.hasAnnotation(CACHE_INVALIDATE_LIST) + || methodInfo.hasAnnotation(CACHE_RESULT)) { + short[] cacheKeyParameterPositions = getCacheKeyParameterPositions(methodInfo); + /* + * The bytecode transformation is always performed even if `cacheKeyParameterPositions` is empty because + * 
the method parameters would be inspected using reflection at run time otherwise. + */ + bytecodeTransformers.add(new BytecodeTransformerBuildItem(classInfo.toString(), + new RestClientMethodEnhancer(methodInfo.name(), cacheKeyParameterPositions))); } } } } - return cacheNames; + return bytecodeTransformers; + } + + /** + * Returns an array containing the positions of the given method parameters annotated with + * {@link io.quarkus.cache.CacheKey @CacheKey}, or an empty array if no such parameter is found. + * + * @param methodInfo method info + * @return cache key parameters positions + */ + private short[] getCacheKeyParameterPositions(MethodInfo methodInfo) { + List positions = new ArrayList<>(); + for (AnnotationInstance annotation : methodInfo.annotations(CACHE_KEY)) { + positions.add(annotation.target().asMethodParameter().position()); + } + short[] result = new short[positions.size()]; + for (int i = 0; i < positions.size(); i++) { + result[i] = positions.get(i); + } + return result; } } diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CaffeineCacheInfoBuilder.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CaffeineCacheInfoBuilder.java index 1e61fdb56426a..87563be91a767 100644 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CaffeineCacheInfoBuilder.java +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CaffeineCacheInfoBuilder.java @@ -1,5 +1,6 @@ package io.quarkus.cache.deployment; +import java.util.Collections; import java.util.Set; import java.util.stream.Collectors; @@ -9,19 +10,21 @@ public class CaffeineCacheInfoBuilder { public static Set build(Set cacheNames, CacheConfig cacheConfig) { - return cacheNames.stream().map(cacheName -> { - CaffeineCacheInfo cacheInfo = new CaffeineCacheInfo(); - cacheInfo.name = cacheName; - - CaffeineNamespaceConfig namespaceConfig = cacheConfig.caffeine.namespace.get(cacheInfo.name); - if (namespaceConfig != null) { - namespaceConfig.initialCapacity.ifPresent(capacity -> cacheInfo.initialCapacity = capacity); - namespaceConfig.maximumSize.ifPresent(size -> cacheInfo.maximumSize = size); - namespaceConfig.expireAfterWrite.ifPresent(delay -> cacheInfo.expireAfterWrite = delay); - namespaceConfig.expireAfterAccess.ifPresent(delay -> cacheInfo.expireAfterAccess = delay); - } - - return cacheInfo; - }).collect(Collectors.toSet()); + if (cacheNames.isEmpty()) { + return Collections.emptySet(); + } else { + return cacheNames.stream().map(cacheName -> { + CaffeineCacheInfo cacheInfo = new CaffeineCacheInfo(); + cacheInfo.name = cacheName; + CaffeineNamespaceConfig namespaceConfig = cacheConfig.caffeine.namespace.get(cacheInfo.name); + if (namespaceConfig != null) { + namespaceConfig.initialCapacity.ifPresent(capacity -> cacheInfo.initialCapacity = capacity); + namespaceConfig.maximumSize.ifPresent(size -> cacheInfo.maximumSize = size); + namespaceConfig.expireAfterWrite.ifPresent(delay -> cacheInfo.expireAfterWrite = delay); + namespaceConfig.expireAfterAccess.ifPresent(delay -> cacheInfo.expireAfterAccess = delay); + } + return cacheInfo; + }).collect(Collectors.toSet()); + } } } diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/RestClientMethodEnhancer.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/RestClientMethodEnhancer.java new file mode 100644 index 0000000000000..c7b4e8810258e --- /dev/null +++ 
b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/RestClientMethodEnhancer.java
@@ -0,0 +1,66 @@
+package io.quarkus.cache.deployment;
+
+import java.util.function.BiFunction;
+
+import org.objectweb.asm.AnnotationVisitor;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+import io.quarkus.cache.runtime.CacheKeyParameterPositions;
+import io.quarkus.gizmo.Gizmo;
+
+/*
+ * When a MicroProfile REST Client method annotated with a caching annotation is executed, an interception is performed by the
+ * MP REST Client interceptors implementation, which is not Arc. This implementation is unaware of the
+ * @CacheKeyParameterPositions annotation that we add at build time in CacheAnnotationsTransformer to avoid relying on
+ * reflection at run time to identify the cache key elements. Reflection is bad for performance because of underlying
+ * synchronized calls. This class does a similar job to CacheAnnotationsTransformer, and since it relies on bytecode
+ * transformation, the @CacheKeyParameterPositions annotation is available during the MP REST Client cache interception.
+ */
+class RestClientMethodEnhancer implements BiFunction<String, ClassVisitor, ClassVisitor> {
+
+    private static final String CACHE_KEY_PARAMETER_POSITIONS_DESCRIPTOR = "L"
+            + CacheKeyParameterPositions.class.getName().replace('.', '/') + ";";
+
+    private final String methodName;
+    private final short[] cacheKeyParameterPositions;
+
+    public RestClientMethodEnhancer(String methodName, short[] cacheKeyParameterPositions) {
+        this.methodName = methodName;
+        this.cacheKeyParameterPositions = cacheKeyParameterPositions;
+    }
+
+    @Override
+    public ClassVisitor apply(String className, ClassVisitor classVisitor) {
+        return new ClassVisitor(Gizmo.ASM_API_VERSION, classVisitor) {
+
+            @Override
+            public MethodVisitor visitMethod(int access, String name, String descriptor, String signature,
+                    String[] exceptions) {
+
+                MethodVisitor superVisitor = super.visitMethod(access, name, descriptor, signature, exceptions);
+                if (!name.equals(methodName)) {
+                    // This is not the method we want to enhance, let's skip the bytecode transformation.
+                    return superVisitor;
+                } else {
+                    return new MethodVisitor(Gizmo.ASM_API_VERSION, superVisitor) {
+
+                        @Override
+                        public void visitEnd() {
+
+                            /*
+                             * If the method parameters at positions 0 and 2 are annotated with @CacheKey, the following code
+                             * will add the `@CacheKeyParameterPositions(value={0, 2})` annotation to the method.
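+                             *
+                             * For illustration only (hypothetical method, not part of this patch): a REST client method declared as
+                             * `Response find(@CacheKey String tenant, String traceId, @CacheKey Long id)` matches that case and
+                             * would end up carrying `@CacheKeyParameterPositions({0, 2})` once transformed.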
+ */ + AnnotationVisitor annotation = super.visitAnnotation(CACHE_KEY_PARAMETER_POSITIONS_DESCRIPTOR, + true); + annotation.visit("value", cacheKeyParameterPositions); + annotation.visitEnd(); + super.visitEnd(); + } + }; + } + } + }; + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/devconsole/DevConsoleProcessor.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/devconsole/DevConsoleProcessor.java new file mode 100644 index 0000000000000..f079dc2758f7a --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/devconsole/DevConsoleProcessor.java @@ -0,0 +1,25 @@ +package io.quarkus.cache.deployment.devconsole; + +import static io.quarkus.deployment.annotations.ExecutionTime.STATIC_INIT; + +import io.quarkus.cache.runtime.CaffeineCacheSupplier; +import io.quarkus.cache.runtime.devconsole.CacheDevConsoleRecorder; +import io.quarkus.deployment.IsDevelopment; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.devconsole.spi.DevConsoleRouteBuildItem; +import io.quarkus.devconsole.spi.DevConsoleRuntimeTemplateInfoBuildItem; + +public class DevConsoleProcessor { + + @BuildStep(onlyIf = IsDevelopment.class) + public DevConsoleRuntimeTemplateInfoBuildItem collectBeanInfo() { + return new DevConsoleRuntimeTemplateInfoBuildItem("cacheInfos", new CaffeineCacheSupplier()); + } + + @BuildStep + @Record(value = STATIC_INIT, optional = true) + DevConsoleRouteBuildItem invokeEndpoint(CacheDevConsoleRecorder recorder) { + return new DevConsoleRouteBuildItem("caches", "POST", recorder.clearCacheHandler()); + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/ClassTargetException.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/ClassTargetException.java new file mode 100644 index 0000000000000..d989baa4907f6 --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/ClassTargetException.java @@ -0,0 +1,24 @@ +package io.quarkus.cache.deployment.exception; + +import org.jboss.jandex.DotName; + +/** + * This exception is thrown at build time during the validation phase if a class is annotated with + * {@link io.quarkus.cache.CacheInvalidate @CacheInvalidate}, {@link io.quarkus.cache.CacheInvalidateAll @CacheInvalidateAll} or + * {@link io.quarkus.cache.CacheResult @CacheResult}. These annotations are only allowed at type level for the caching + * interceptors from this extension. 
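+ * In application code, these annotations must be placed on non-private methods.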
+ */ +@SuppressWarnings("serial") +public class ClassTargetException extends RuntimeException { + + private final DotName className; + + public ClassTargetException(DotName className, DotName annotationName) { + super("Caching annotations are not allowed on a class [class=" + className + ", annotation=" + annotationName + "]"); + this.className = className; + } + + public DotName getClassName() { + return className; + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/IllegalReturnTypeException.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/IllegalReturnTypeException.java deleted file mode 100644 index 197715fe29046..0000000000000 --- a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/IllegalReturnTypeException.java +++ /dev/null @@ -1,9 +0,0 @@ -package io.quarkus.cache.deployment.exception; - -@SuppressWarnings("serial") -public class IllegalReturnTypeException extends RuntimeException { - - public IllegalReturnTypeException(String message) { - super(message); - } -} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/PrivateMethodTargetException.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/PrivateMethodTargetException.java new file mode 100644 index 0000000000000..de1b85baa3da5 --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/PrivateMethodTargetException.java @@ -0,0 +1,25 @@ +package io.quarkus.cache.deployment.exception; + +import org.jboss.jandex.DotName; +import org.jboss.jandex.MethodInfo; + +/** + * This exception is thrown at build time during the validation phase if a private method is annotated with + * {@link io.quarkus.cache.CacheInvalidate @CacheInvalidate}, {@link io.quarkus.cache.CacheInvalidateAll @CacheInvalidateAll} or + * {@link io.quarkus.cache.CacheResult @CacheResult}. + */ +@SuppressWarnings("serial") +public class PrivateMethodTargetException extends RuntimeException { + + private final MethodInfo methodInfo; + + public PrivateMethodTargetException(MethodInfo methodInfo, DotName annotationName) { + super("Caching annotations are not allowed on a private method [class=" + methodInfo.declaringClass().name() + + ", method=" + methodInfo.name() + ", annotation=" + annotationName + "]"); + this.methodInfo = methodInfo; + } + + public MethodInfo getMethodInfo() { + return methodInfo; + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/UnknownCacheNameException.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/UnknownCacheNameException.java new file mode 100644 index 0000000000000..a1e91a78b4421 --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/UnknownCacheNameException.java @@ -0,0 +1,26 @@ +package io.quarkus.cache.deployment.exception; + +import org.jboss.jandex.DotName; + +/** + * This exception is thrown at build time during the validation phase if a field, a constructor or a method parameter annotated + * with {@link io.quarkus.cache.CacheName @CacheName} is referencing an unknown cache name. A cache name is unknown if it is not + * used in any {@link io.quarkus.cache.CacheInvalidate @CacheInvalidate}, + * {@link io.quarkus.cache.CacheInvalidateAll @CacheInvalidateAll} or {@link io.quarkus.cache.CacheResult @CacheResult} + * annotation. 
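+ * A cache name contributed by another extension through an {@code AdditionalCacheNameBuildItem} is also considered known.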
+ */ +@SuppressWarnings("serial") +public class UnknownCacheNameException extends RuntimeException { + + private final String cacheName; + + public UnknownCacheNameException(DotName className, String cacheName) { + super("A field or method parameter is annotated with a @CacheName annotation referencing an unknown cache name [class=" + + className + ", cacheName=" + cacheName + "]"); + this.cacheName = cacheName; + } + + public String getCacheName() { + return cacheName; + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/UnsupportedRepeatedAnnotationException.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/UnsupportedRepeatedAnnotationException.java new file mode 100644 index 0000000000000..a3ed104ddb50b --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/UnsupportedRepeatedAnnotationException.java @@ -0,0 +1,25 @@ +package io.quarkus.cache.deployment.exception; + +import org.jboss.jandex.MethodInfo; + +/** + * This exception is thrown at build time during the validation phase if a method from a MicroProfile REST Client bean is + * annotated with repeated {@link io.quarkus.cache.CacheInvalidate @CacheInvalidate} or + * {@link io.quarkus.cache.CacheInvalidateAll @CacheInvalidateAll} annotations. Interceptions on such a bean are not managed by + * Arc and the usage of repeated interceptor bindings is not currently supported. + */ +@SuppressWarnings("serial") +public class UnsupportedRepeatedAnnotationException extends RuntimeException { + + private final MethodInfo methodInfo; + + public UnsupportedRepeatedAnnotationException(MethodInfo methodInfo) { + super("Repeated caching annotations on a method from a class annotated with @RegisterRestClient are not currently supported [class=" + + methodInfo.declaringClass().name() + ", method=" + methodInfo.name() + "]"); + this.methodInfo = methodInfo; + } + + public MethodInfo getMethodInfo() { + return methodInfo; + } +} diff --git a/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/VoidReturnTypeTargetException.java b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/VoidReturnTypeTargetException.java new file mode 100644 index 0000000000000..7ba13fb12e659 --- /dev/null +++ b/extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/VoidReturnTypeTargetException.java @@ -0,0 +1,23 @@ +package io.quarkus.cache.deployment.exception; + +import org.jboss.jandex.MethodInfo; + +/** + * This exception is thrown at build time during the validation phase if a method returning void is annotated with + * {@link io.quarkus.cache.CacheResult @CacheResult}. 
+ */ +@SuppressWarnings("serial") +public class VoidReturnTypeTargetException extends RuntimeException { + + private final MethodInfo methodInfo; + + public VoidReturnTypeTargetException(MethodInfo methodInfo) { + super("@CacheResult is not allowed on a method returning void [class=" + methodInfo.declaringClass().name() + + ", method=" + methodInfo.name() + "]"); + this.methodInfo = methodInfo; + } + + public MethodInfo getMethodInfo() { + return methodInfo; + } +} diff --git a/extensions/cache/deployment/src/main/resources/dev-templates/caches.html b/extensions/cache/deployment/src/main/resources/dev-templates/caches.html new file mode 100644 index 0000000000000..6a10705f6a4d9 --- /dev/null +++ b/extensions/cache/deployment/src/main/resources/dev-templates/caches.html @@ -0,0 +1,35 @@ +{#include main} +{#style} +.annotation { +color: gray; +} +{/style} +{#title}Caches{/title} +{#body} + + + + + + + + + {#for cacheInfo in info:cacheInfos} + + + + {/for} + +
    NameSize
    + {cacheInfo.name} + +
    + + + +
    +
    +{/body} +{/include} diff --git a/extensions/cache/deployment/src/main/resources/dev-templates/embedded.html b/extensions/cache/deployment/src/main/resources/dev-templates/embedded.html new file mode 100644 index 0000000000000..ea948f9198934 --- /dev/null +++ b/extensions/cache/deployment/src/main/resources/dev-templates/embedded.html @@ -0,0 +1,3 @@ + + + Caches {info:cacheInfos.size()} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheConfigTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheConfigTest.java index d2cfac8cd281f..013daa0dd8b94 100644 --- a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheConfigTest.java +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheConfigTest.java @@ -13,8 +13,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; +import io.quarkus.cache.CacheManager; import io.quarkus.cache.CacheResult; -import io.quarkus.cache.runtime.CacheRepository; import io.quarkus.cache.runtime.caffeine.CaffeineCache; import io.quarkus.test.QuarkusUnitTest; @@ -29,11 +29,11 @@ public class CacheConfigTest { private static final String CACHE_NAME = "test-cache"; @Inject - CacheRepository cacheRepository; + CacheManager cacheManager; @Test public void testConfig() { - CaffeineCache cache = (CaffeineCache) cacheRepository.getCache(CACHE_NAME); + CaffeineCache cache = (CaffeineCache) cacheManager.getCache(CACHE_NAME).get(); assertEquals(10, cache.getInitialCapacity()); assertEquals(100L, cache.getMaximumSize()); assertEquals(Duration.ofSeconds(30L), cache.getExpireAfterWrite()); diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheResultVoidReturnTypeTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheResultVoidReturnTypeTest.java deleted file mode 100644 index 07308282c7b83..0000000000000 --- a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/CacheResultVoidReturnTypeTest.java +++ /dev/null @@ -1,41 +0,0 @@ -package io.quarkus.cache.test.deployment; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - -import javax.enterprise.inject.spi.DeploymentException; -import javax.ws.rs.GET; -import javax.ws.rs.Path; - -import org.jboss.shrinkwrap.api.ShrinkWrap; -import org.jboss.shrinkwrap.api.spec.JavaArchive; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.cache.CacheResult; -import io.quarkus.cache.deployment.exception.IllegalReturnTypeException; -import io.quarkus.test.QuarkusUnitTest; - -public class CacheResultVoidReturnTypeTest { - - @RegisterExtension - static final QuarkusUnitTest TEST = new QuarkusUnitTest() - .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(TestResource.class)).assertException(e -> { - assertEquals(DeploymentException.class, e.getClass()); - assertEquals(IllegalReturnTypeException.class, e.getCause().getClass()); - }); - - @Test - public void shouldNotBeInvoked() { - fail("This method should not be invoked"); - } - - @Path("/test") - static class TestResource { - - @GET - @CacheResult(cacheName = "test-cache") - public void shouldThrowDeploymentException(String key) { - } - } -} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DeploymentExceptionsTest.java 
b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DeploymentExceptionsTest.java new file mode 100644 index 0000000000000..892918f5ba7bf --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DeploymentExceptionsTest.java @@ -0,0 +1,123 @@ +package io.quarkus.cache.test.deployment; + +import static java.util.Arrays.stream; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.stream.Stream; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.spi.DeploymentException; +import javax.ws.rs.GET; +import javax.ws.rs.Path; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheInvalidate; +import io.quarkus.cache.CacheInvalidateAll; +import io.quarkus.cache.CacheName; +import io.quarkus.cache.CacheResult; +import io.quarkus.cache.deployment.exception.ClassTargetException; +import io.quarkus.cache.deployment.exception.PrivateMethodTargetException; +import io.quarkus.cache.deployment.exception.UnknownCacheNameException; +import io.quarkus.cache.deployment.exception.VoidReturnTypeTargetException; +import io.quarkus.test.QuarkusUnitTest; + +/** + * This class tests many kinds of {@link DeploymentException} causes related to caching annotations. + */ +public class DeploymentExceptionsTest { + + private static final String UNKNOWN_CACHE_1 = "unknown-cache-1"; + private static final String UNKNOWN_CACHE_2 = "unknown-cache-2"; + private static final String UNKNOWN_CACHE_3 = "unknown-cache-3"; + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClasses(TestResource.class, TestBean.class)) + .assertException(t -> { + assertEquals(DeploymentException.class, t.getClass()); + assertEquals(10, t.getSuppressed().length); + assertPrivateMethodTargetException(t, "shouldThrowPrivateMethodTargetException", 1); + assertPrivateMethodTargetException(t, "shouldAlsoThrowPrivateMethodTargetException", 2); + assertVoidReturnTypeTargetException(t, "showThrowVoidReturnTypeTargetException"); + assertClassTargetException(t, TestResource.class, 1); + assertClassTargetException(t, TestBean.class, 2); + assertUnknownCacheNameException(t, UNKNOWN_CACHE_1); + assertUnknownCacheNameException(t, UNKNOWN_CACHE_2); + assertUnknownCacheNameException(t, UNKNOWN_CACHE_3); + }); + + private static void assertPrivateMethodTargetException(Throwable t, String expectedMethodName, long expectedCount) { + assertEquals(expectedCount, filterSuppressed(t, PrivateMethodTargetException.class) + .filter(s -> expectedMethodName.equals(s.getMethodInfo().name())).count()); + } + + private static void assertVoidReturnTypeTargetException(Throwable t, String expectedMethodName) { + assertEquals(1, filterSuppressed(t, VoidReturnTypeTargetException.class) + .filter(s -> expectedMethodName.equals(s.getMethodInfo().name())).count()); + } + + private static void assertClassTargetException(Throwable t, Class expectedClassName, long expectedCount) { + assertEquals(expectedCount, filterSuppressed(t, ClassTargetException.class) + .filter(s -> expectedClassName.getName().equals(s.getClassName().toString())).count()); + } + + private static void assertUnknownCacheNameException(Throwable t, 
String expectedCacheName) { + assertEquals(1, filterSuppressed(t, UnknownCacheNameException.class) + .filter(s -> expectedCacheName.equals(s.getCacheName())).count()); + } + + private static Stream filterSuppressed(Throwable t, Class filterClass) { + return stream(t.getSuppressed()).filter(filterClass::isInstance).map(filterClass::cast); + } + + @Test + public void shouldNotBeInvoked() { + fail("This method should not be invoked"); + } + + @Path("/test") + // Single annotation test. + @CacheInvalidate(cacheName = "should-throw-class-target-exception") + static class TestResource { + + @GET + // Single annotation test. + @CacheInvalidateAll(cacheName = "should-throw-private-method-target-exception") + private void shouldThrowPrivateMethodTargetException() { + } + + @GET + // Repeated annotations test. + @CacheInvalidate(cacheName = "should-throw-private-method-target-exception") + @CacheInvalidate(cacheName = "should-throw-private-method-target-exception") + private void shouldAlsoThrowPrivateMethodTargetException() { + } + + @GET + @CacheResult(cacheName = "should-throw-void-return-type-target-exception") + public void showThrowVoidReturnTypeTargetException(String key) { + } + } + + @ApplicationScoped + // Repeated annotations test. + @CacheInvalidateAll(cacheName = "should-throw-class-target-exception") + @CacheInvalidateAll(cacheName = "should-throw-class-target-exception") + static class TestBean { + + @CacheName(UNKNOWN_CACHE_1) + Cache cache; + + public TestBean(@CacheName(UNKNOWN_CACHE_2) Cache cache) { + } + + public void setCache(@CacheName(UNKNOWN_CACHE_3) Cache cache) { + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DisabledCacheTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DisabledCacheTest.java new file mode 100644 index 0000000000000..57234f7b34e88 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DisabledCacheTest.java @@ -0,0 +1,43 @@ +package io.quarkus.cache.test.deployment; + +import static org.junit.jupiter.api.Assertions.assertNotEquals; + +import javax.enterprise.context.Dependent; +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheResult; +import io.quarkus.test.QuarkusUnitTest; + +public class DisabledCacheTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource(new StringAsset("quarkus.cache.enabled=false"), "application.properties") + .addClass(CachedService.class)); + + private static final String KEY = "key"; + + @Inject + CachedService cachedService; + + @Test + public void testEnabledFlagProperty() { + assertNotEquals(cachedService.cachedMethod(KEY), cachedService.cachedMethod(KEY)); + } + + @Dependent + static class CachedService { + + @CacheResult(cacheName = "test-cache") + public Object cachedMethod(String key) { + return new Object(); + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/UnknownCacheTypeTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/UnknownCacheTypeTest.java index e45781af4745f..3beff50967f1e 100644 --- 
a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/UnknownCacheTypeTest.java +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/UnknownCacheTypeTest.java @@ -2,6 +2,7 @@ import static org.junit.jupiter.api.Assertions.fail; +import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.spi.DeploymentException; import org.jboss.shrinkwrap.api.ShrinkWrap; @@ -10,6 +11,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; +import io.quarkus.cache.CacheResult; import io.quarkus.test.QuarkusUnitTest; public class UnknownCacheTypeTest { @@ -17,11 +19,21 @@ public class UnknownCacheTypeTest { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) - .addAsResource(new StringAsset("quarkus.cache.type=i_am_an_unknown_cache_type"), "application.properties")) + .addAsResource(new StringAsset("quarkus.cache.type=i_am_an_unknown_cache_type"), "application.properties") + .addClass(CachedService.class)) .setExpectedException(DeploymentException.class); @Test public void shouldNotBeInvoked() { fail("This method should not be invoked"); } + + @ApplicationScoped + static class CachedService { + + @CacheResult(cacheName = "test-cache") + public Object cachedMethod(String key) { + return new Object(); + } + } } diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheInterceptionContextTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheInterceptionContextTest.java new file mode 100644 index 0000000000000..2c79c35879156 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheInterceptionContextTest.java @@ -0,0 +1,53 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.lang.annotation.Annotation; +import java.util.ArrayList; + +import org.junit.jupiter.api.Test; + +import io.quarkus.cache.CacheResult; +import io.quarkus.cache.runtime.CacheInterceptionContext; + +public class CacheInterceptionContextTest { + + @Test + public void testConstructor() { + assertThrows(NullPointerException.class, () -> { + new CacheInterceptionContext<>(null, new ArrayList<>()); + }, "A NullPointerException should be thrown when the interceptor bindings list is null"); + assertThrows(NullPointerException.class, () -> { + new CacheInterceptionContext<>(new ArrayList<>(), null); + }, "A NullPointerException should be thrown when the cache key parameter positions list is null"); + // Empty lists should be allowed. + new CacheInterceptionContext<>(new ArrayList<>(), new ArrayList<>()); + } + + @Test + public void testImmutability() { + CacheInterceptionContext context = new CacheInterceptionContext<>(new ArrayList<>(), new ArrayList<>()); + // Lists should be unmodifiable. 
+ assertThrows(UnsupportedOperationException.class, () -> { + context.getInterceptorBindings().add(new CacheResult() { + @Override + public Class annotationType() { + return CacheResult.class; + } + + @Override + public String cacheName() { + return "cacheName"; + } + + @Override + public long lockTimeout() { + return 0; + } + }); + }); + assertThrows(UnsupportedOperationException.class, () -> { + context.getCacheKeyParameterPositions().add((short) 123); + }); + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheInterceptorTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheInterceptorTest.java new file mode 100644 index 0000000000000..a78c7dfed858d --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheInterceptorTest.java @@ -0,0 +1,84 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import io.quarkus.cache.runtime.AbstractCache; +import io.quarkus.cache.runtime.CacheInterceptor; +import io.quarkus.cache.runtime.CompositeCacheKey; +import io.quarkus.cache.runtime.DefaultCacheKey; +import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.cache.runtime.caffeine.CaffeineCacheInfo; + +public class CacheInterceptorTest { + + private static final TestCacheInterceptor TEST_CACHE_INTERCEPTOR = new TestCacheInterceptor(); + + @Test + public void testDefaultKey() { + // We need a CaffeineCache instance to test the default key logic. + CaffeineCacheInfo cacheInfo = new CaffeineCacheInfo(); + cacheInfo.name = "test-cache"; + CaffeineCache cache = new CaffeineCache(cacheInfo); + + DefaultCacheKey expectedKey = new DefaultCacheKey(cacheInfo.name); + Object actualKey = getCacheKey(cache, Collections.emptyList(), new Object[] {}); + assertEquals(expectedKey, actualKey); + } + + @Test + public void testExplicitSimpleKey() { + Object expectedKey = new Object(); + Object actualKey = getCacheKey(Arrays.asList((short) 1), new Object[] { new Object(), expectedKey }); + // A cache key with one element should be the element itself (same object reference). + assertEquals(expectedKey, actualKey); + } + + @Test + public void testExplicitCompositeKey() { + Object keyElement1 = new Object(); + Object keyElement2 = new Object(); + Object expectedKey = new CompositeCacheKey(keyElement1, keyElement2); + Object actualKey = getCacheKey(Arrays.asList((short) 0, (short) 2), + new Object[] { keyElement1, new Object(), keyElement2 }); + assertEquals(expectedKey, actualKey); + } + + @Test + public void testImplicitSimpleKey() { + Object expectedKey = new Object(); + Object actualKey = getCacheKey(Collections.emptyList(), new Object[] { expectedKey }); + // A cache key with one element should be the element itself (same object reference). 
+ assertEquals(expectedKey, actualKey); + } + + @Test + public void testImplicitCompositeKey() { + Object keyElement1 = new Object(); + Object keyElement2 = new Object(); + Object expectedKey = new CompositeCacheKey(keyElement1, keyElement2); + Object actualKey = getCacheKey(Collections.emptyList(), new Object[] { keyElement1, keyElement2 }); + assertEquals(expectedKey, actualKey); + } + + private Object getCacheKey(AbstractCache cache, List cacheKeyParameterPositions, Object[] methodParameterValues) { + return TEST_CACHE_INTERCEPTOR.getCacheKey(cache, cacheKeyParameterPositions, methodParameterValues); + } + + private Object getCacheKey(List cacheKeyParameterPositions, Object[] methodParameterValues) { + return TEST_CACHE_INTERCEPTOR.getCacheKey(null, cacheKeyParameterPositions, methodParameterValues); + } + + // This inner class changes the CacheInterceptor#getCacheKey method visibility to public. + private static class TestCacheInterceptor extends CacheInterceptor { + @Override + public Object getCacheKey(AbstractCache cache, List cacheKeyParameterPositions, Object[] methodParameterValues) { + return super.getCacheKey(cache, cacheKeyParameterPositions, methodParameterValues); + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheKeyBuilderTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheKeyBuilderTest.java deleted file mode 100644 index 3f45d516ec9ac..0000000000000 --- a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheKeyBuilderTest.java +++ /dev/null @@ -1,77 +0,0 @@ -package io.quarkus.cache.test.runtime; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.math.BigDecimal; -import java.util.Arrays; -import java.util.Collections; -import java.util.concurrent.atomic.AtomicInteger; - -import org.junit.jupiter.api.Test; - -import io.quarkus.cache.runtime.CacheKeyBuilder; - -public class CacheKeyBuilderTest { - - @Test - public void testInvalidKeyElements() { - assertThrows(IllegalArgumentException.class, () -> { - CacheKeyBuilder.build(null); - }); - assertThrows(IllegalArgumentException.class, () -> { - CacheKeyBuilder.build(Collections.emptyList()); - }); - assertThrows(NullPointerException.class, () -> { - CacheKeyBuilder.build(Collections.singletonList(null)); - }, CacheKeyBuilder.NULL_KEYS_NOT_SUPPORTED_MSG); - } - - @Test - public void testSimpleKey() { - String keyElement = "quarkus"; - - // A cache key with one element should be the element itself (same object reference). - Object simpleKey1 = CacheKeyBuilder.build(Collections.singletonList(keyElement)); - assertTrue(keyElement == simpleKey1); - - // Two cache keys built from the same single element should have the same object reference. - Object simpleKey2 = CacheKeyBuilder.build(Collections.singletonList(keyElement)); - assertTrue(simpleKey1 == simpleKey2); - - // Two cache keys built from different single elements should not be equal. - Object simpleKey3 = CacheKeyBuilder.build(Collections.singletonList(Boolean.valueOf("true"))); - assertNotEquals(simpleKey2, simpleKey3); - } - - @Test - public void testCompositeKey() { - String keyElement1 = "quarkus"; - long keyElement2 = 123L; - Object keyElement3 = new Object(); - - // Two cache keys built from the same elements and in the same order should be equal. 
- Object compositeKey1 = CacheKeyBuilder.build(Arrays.asList(keyElement1, keyElement2, keyElement3)); - Object compositeKey2 = CacheKeyBuilder.build(Arrays.asList(keyElement1, keyElement2, keyElement3)); - assertEquals(compositeKey1, compositeKey2); - - // Two cache keys built from the same elements but not in the same order should not be equal. - Object compositeKey3 = CacheKeyBuilder.build(Arrays.asList(keyElement2, keyElement1, keyElement3)); - assertNotEquals(compositeKey2, compositeKey3); - - // Two cache keys built from a different number of elements should not be equal. - Object compositeKey4 = CacheKeyBuilder - .build(Arrays.asList(keyElement1, keyElement2, keyElement3, keyElement1, keyElement2, keyElement3)); - assertNotEquals(compositeKey2, compositeKey4); - - // Two cache keys built from multiple elements should only be equal if all elements are equal pairwise. - Object compositeKey5 = CacheKeyBuilder.build(Arrays.asList(new AtomicInteger(456), keyElement2, keyElement3)); - assertNotEquals(compositeKey2, compositeKey5); - Object compositeKey6 = CacheKeyBuilder.build(Arrays.asList(keyElement1, new Object(), keyElement3)); - assertNotEquals(compositeKey2, compositeKey6); - Object compositeKey7 = CacheKeyBuilder.build(Arrays.asList(keyElement1, keyElement2, new BigDecimal(10))); - assertNotEquals(compositeKey2, compositeKey7); - } -} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheNamesTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheNamesTest.java new file mode 100644 index 0000000000000..ccfd0dbe76667 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CacheNamesTest.java @@ -0,0 +1,76 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheInvalidate; +import io.quarkus.cache.CacheInvalidateAll; +import io.quarkus.cache.CacheKey; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.CacheResult; +import io.quarkus.test.QuarkusUnitTest; + +public class CacheNamesTest { + + private static final String CACHE_NAME_1 = "test-cache-1"; + private static final String CACHE_NAME_2 = "test-cache-2"; + private static final String CACHE_NAME_3 = "test-cache-3"; + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class).addClasses(CachedService1.class, CachedService2.class)); + + @Inject + CacheManager cacheManager; + + @Test + public void testCacheNamesCollection() { + /* + * The main goal of this test is to check that a cache with an empty name is not instantiated at build time because of + * the bindings with an empty `cacheName` parameter from the cache interceptors. 
+ */ + List cacheNames = new ArrayList<>(cacheManager.getCacheNames()); + assertEquals(3, cacheNames.size()); + assertTrue(cacheNames.containsAll(Arrays.asList(CACHE_NAME_1, CACHE_NAME_2, CACHE_NAME_3))); + } + + @ApplicationScoped + static class CachedService1 { + + @CacheResult(cacheName = CACHE_NAME_1) + public String getValue(Object key) { + return new String(); + } + + @CacheInvalidate(cacheName = CACHE_NAME_2) + public void invalidate(Object key) { + } + } + + @Singleton + static class CachedService2 { + + @CacheResult(cacheName = CACHE_NAME_1) + public String getValue(@CacheKey Object key, BigDecimal notPartOfTheKey) { + return new String(); + } + + @CacheInvalidateAll(cacheName = CACHE_NAME_3) + public void invalidateAll() { + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CaffeineCacheInjectionTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CaffeineCacheInjectionTest.java new file mode 100644 index 0000000000000..dd90c8f09d77d --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CaffeineCacheInjectionTest.java @@ -0,0 +1,72 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheInvalidateAll; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.CacheName; +import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.test.QuarkusUnitTest; + +public class CaffeineCacheInjectionTest { + + private static final String CACHE_NAME = "test-cache"; + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(CachedService.class)); + + @Inject + CachedService cachedService; + + @CacheName(CACHE_NAME) + Cache cache; + + @Inject + CacheManager cacheManager; + + @Test + public void testInjection() { + assertEquals(CaffeineCache.class, cache.getClass()); + assertEquals(cache, cacheManager.getCache(CACHE_NAME).get()); + assertEquals(cache, cachedService.getConstructorInjectedCache()); + assertEquals(cache, cachedService.getMethodInjectedCache()); + } + + @ApplicationScoped + static class CachedService { + + Cache constructorInjectedCache; + Cache methodInjectedCache; + + public CachedService(@CacheName(CACHE_NAME) Cache cache) { + constructorInjectedCache = cache; + } + + public Cache getConstructorInjectedCache() { + return constructorInjectedCache; + } + + public Cache getMethodInjectedCache() { + return methodInjectedCache; + } + + @Inject + public void setMethodInjectedCache(@CacheName(CACHE_NAME) Cache cache) { + methodInjectedCache = cache; + } + + @CacheInvalidateAll(cacheName = CACHE_NAME) + public void invalidateAll() { + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CaffeineCacheLockTimeoutTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CaffeineCacheLockTimeoutTest.java deleted file mode 100644 index 56d1398cf5d4c..0000000000000 --- a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CaffeineCacheLockTimeoutTest.java +++ /dev/null @@ -1,112 +0,0 @@ -package 
io.quarkus.cache.test.runtime; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import javax.inject.Inject; -import javax.inject.Singleton; - -import org.jboss.shrinkwrap.api.ShrinkWrap; -import org.jboss.shrinkwrap.api.asset.StringAsset; -import org.jboss.shrinkwrap.api.spec.JavaArchive; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import io.quarkus.cache.CacheResult; -import io.quarkus.test.QuarkusUnitTest; - -public class CaffeineCacheLockTimeoutTest { - - private static final Object TIMEOUT_KEY = new Object(); - private static final Object NO_TIMEOUT_KEY = new Object(); - - @RegisterExtension - static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) - .addAsResource(new StringAsset("quarkus.cache.type=caffeine"), "application.properties") - .addClass(CachedService.class)); - - @Inject - CachedService cachedService; - - @Test - public void testConcurrentCacheAccessWithLockTimeout() throws InterruptedException, ExecutionException { - // This is required to make sure the CompletableFuture from this test are executed concurrently. - ExecutorService executorService = Executors.newFixedThreadPool(2); - - CompletableFuture future1 = CompletableFuture.supplyAsync(() -> { - try { - return cachedService.cachedMethodWithLockTimeout(TIMEOUT_KEY); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }, executorService); - - CompletableFuture future2 = CompletableFuture.supplyAsync(() -> { - try { - return cachedService.cachedMethodWithLockTimeout(TIMEOUT_KEY); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }, executorService); - - CompletableFuture.allOf(future1, future2).get(); - - // The following assertion checks that the objects references resulting from both futures executions are different, - // which means the method was invoked twice because the lock timeout was triggered. - assertTrue(future1.get() != future2.get()); - } - - @Test - public void testConcurrentCacheAccessWithoutLockTimeout() throws InterruptedException, ExecutionException { - // This is required to make sure the CompletableFuture from this test are executed concurrently. - ExecutorService executorService = Executors.newFixedThreadPool(2); - - CompletableFuture future1 = CompletableFuture.supplyAsync(() -> { - try { - return cachedService.cachedMethodWithoutLockTimeout(NO_TIMEOUT_KEY); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }, executorService); - - CompletableFuture future2 = CompletableFuture.supplyAsync(() -> { - try { - return cachedService.cachedMethodWithoutLockTimeout(NO_TIMEOUT_KEY); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }, executorService); - - CompletableFuture.allOf(future1, future2).get(); - - // The following assertion checks that the objects references resulting from both futures executions are the same, - // which means the method was invoked once and the lock timeout was NOT triggered. 
- assertTrue(future1.get() == future2.get()); - } - - @Singleton - static class CachedService { - - private static final String CACHE_NAME = "test-cache"; - - @CacheResult(cacheName = CACHE_NAME, lockTimeout = 500) - public Object cachedMethodWithLockTimeout(Object key) throws InterruptedException { - // The following sleep is longer than the @CacheResult lockTimeout parameter value, a timeout will be triggered if - // two concurrent calls are made at the same time. - Thread.sleep(1000); - return new Object(); - } - - @CacheResult(cacheName = CACHE_NAME, lockTimeout = 1000) - public Object cachedMethodWithoutLockTimeout(Object key) throws InterruptedException { - // The following sleep is shorter than the @CacheResult lockTimeout parameter value, two concurrent calls made at - // the same time won't trigger a timeout. - Thread.sleep(500); - return new Object(); - } - } -} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CompositeCacheKeyTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CompositeCacheKeyTest.java new file mode 100644 index 0000000000000..da9885f7e2ca7 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CompositeCacheKeyTest.java @@ -0,0 +1,51 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.math.BigDecimal; + +import org.junit.jupiter.api.Test; + +import io.quarkus.cache.runtime.CompositeCacheKey; + +public class CompositeCacheKeyTest { + + private static final String KEY_ELEMENT_1 = "test"; + private static final long KEY_ELEMENT_2 = 123L; + private static final Object KEY_ELEMENT_3 = new Object(); + + @Test + public void testEquality() { + + // Two composite keys built from the same elements and in the same order should be equal. + CompositeCacheKey key1 = new CompositeCacheKey(KEY_ELEMENT_1, KEY_ELEMENT_2, KEY_ELEMENT_3); + CompositeCacheKey key2 = new CompositeCacheKey(KEY_ELEMENT_1, KEY_ELEMENT_2, KEY_ELEMENT_3); + assertEquals(key1, key2); + assertEquals(key1.hashCode(), key2.hashCode()); + + // If the same elements are used but their order is changed, the keys should not be equal. + CompositeCacheKey key3 = new CompositeCacheKey(KEY_ELEMENT_2, KEY_ELEMENT_1, KEY_ELEMENT_3); + assertNotEquals(key2, key3); + + // If the numbers of elements are different, the keys should not be equal. + CompositeCacheKey key4 = new CompositeCacheKey(KEY_ELEMENT_1, KEY_ELEMENT_2, KEY_ELEMENT_3, new Object()); + assertNotEquals(key2, key4); + + // Two composite keys built from multiple elements should only be equal if all elements are equal pairwise. 
+ CompositeCacheKey key5 = new CompositeCacheKey(Boolean.TRUE, KEY_ELEMENT_2, KEY_ELEMENT_3); + assertNotEquals(key2, key5); + CompositeCacheKey key6 = new CompositeCacheKey(KEY_ELEMENT_1, new Object(), KEY_ELEMENT_3); + assertNotEquals(key2, key6); + CompositeCacheKey key7 = new CompositeCacheKey(KEY_ELEMENT_1, KEY_ELEMENT_2, BigDecimal.TEN); + assertNotEquals(key2, key7); + } + + @Test + public void testNoElementsKey() { + assertThrows(IllegalArgumentException.class, () -> { + new CompositeCacheKey(); + }); + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ConcurrencyTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ConcurrencyTest.java new file mode 100644 index 0000000000000..206afa5b9e329 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ConcurrencyTest.java @@ -0,0 +1,138 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicReference; + +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheResult; +import io.quarkus.test.QuarkusUnitTest; + +public class ConcurrencyTest { + + private static final Object TIMEOUT_KEY = new Object(); + private static final Object NO_TIMEOUT_KEY = new Object(); + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(CachedService.class)); + + @Inject + CachedService cachedService; + + @Test + public void testConcurrentCacheAccessWithLockTimeout() throws InterruptedException, ExecutionException { + // This is required to make sure the CompletableFuture from this test are executed concurrently. + ExecutorService executorService = Executors.newFixedThreadPool(2); + + AtomicReference callingThreadName1 = new AtomicReference<>(); + CompletableFuture future1 = CompletableFuture.supplyAsync(() -> { + callingThreadName1.set(Thread.currentThread().getName()); + try { + return cachedService.cachedMethodWithLockTimeout(TIMEOUT_KEY); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, executorService); + + AtomicReference callingThreadName2 = new AtomicReference<>(); + CompletableFuture future2 = CompletableFuture.supplyAsync(() -> { + callingThreadName2.set(Thread.currentThread().getName()); + try { + return cachedService.cachedMethodWithLockTimeout(TIMEOUT_KEY); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, executorService); + + // Let's wait for both futures to complete before running assertions. + CompletableFuture.allOf(future1, future2).get(); + + /* + * A cache lock timeout should be triggered during the second cached method call, which means that both calls should + * result in an execution of the value loader function. Since the calls are done from different threads, the resulting + * values (threads names) should be different. 
+ */ + assertNotEquals(future1.get(), future2.get()); + + // All value loader executions should be done synchronously on the calling thread. + assertEquals(callingThreadName1.get(), future1.get()); + assertEquals(callingThreadName2.get(), future2.get()); + } + + @Test + public void testConcurrentCacheAccessWithoutLockTimeout() throws InterruptedException, ExecutionException { + // This is required to make sure the CompletableFuture from this test are executed concurrently. + ExecutorService executorService = Executors.newFixedThreadPool(2); + + AtomicReference callingThreadName1 = new AtomicReference<>(); + CompletableFuture future1 = CompletableFuture.supplyAsync(() -> { + callingThreadName1.set(Thread.currentThread().getName()); + try { + return cachedService.cachedMethodWithoutLockTimeout(NO_TIMEOUT_KEY); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, executorService); + + AtomicReference callingThreadName2 = new AtomicReference<>(); + CompletableFuture future2 = CompletableFuture.supplyAsync(() -> { + callingThreadName2.set(Thread.currentThread().getName()); + try { + return cachedService.cachedMethodWithoutLockTimeout(NO_TIMEOUT_KEY); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, executorService); + + // Let's wait for both futures to complete before running assertions. + CompletableFuture.allOf(future1, future2).get(); + + /* + * This test should NOT trigger any cache lock timeout so there should only be one value loader execution and the + * resulting values from both cached methods calls should be the same. + */ + assertEquals(future1.get(), future2.get()); + + /* + * The value loader execution should be done synchronously on the calling thread. + * Both thread names need to be checked because there's no way to determine which future will be run first. + */ + assertTrue(callingThreadName1.get().equals(future1.get()) || callingThreadName2.get().equals(future1.get())); + } + + @Singleton + static class CachedService { + + private static final String CACHE_NAME = "test-cache"; + + @CacheResult(cacheName = CACHE_NAME, lockTimeout = 500) + public String cachedMethodWithLockTimeout(Object key) throws InterruptedException { + // The following sleep is longer than the @CacheResult lockTimeout parameter value, a timeout will be triggered if + // two concurrent calls are made at the same time. + Thread.sleep(1000); + return Thread.currentThread().getName(); + } + + @CacheResult(cacheName = CACHE_NAME, lockTimeout = 1000) + public String cachedMethodWithoutLockTimeout(Object key) throws InterruptedException { + // The following sleep is shorter than the @CacheResult lockTimeout parameter value, two concurrent calls made at + // the same time won't trigger a timeout. + Thread.sleep(500); + return Thread.currentThread().getName(); + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ExplicitCompositeKeyCacheTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ExplicitCompositeKeyCacheTest.java index 189336b5238a3..fe6ecb0796294 100644 --- a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ExplicitCompositeKeyCacheTest.java +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ExplicitCompositeKeyCacheTest.java @@ -81,7 +81,7 @@ public void testAllCacheAnnotations() { // Action: cache entry invalidation. // Expected effect: STEP 2 cache entry removed. // Verified by: STEP 7. 
- cachedService.invalidate(KEY_1_ELEMENT_1, KEY_1_ELEMENT_2, new Object()); + cachedService.invalidate(new Object(), KEY_1_ELEMENT_1, KEY_1_ELEMENT_2); // STEP 7 // Action: same call as STEP 2. @@ -129,7 +129,7 @@ public String cachedMethod(@CacheKey Locale keyElement1, @CacheKey BigDecimal ke } @CacheInvalidate(cacheName = CACHE_NAME) - public void invalidate(@CacheKey Locale keyElement1, @CacheKey BigDecimal keyElement2, Object notPartOfTheKey) { + public void invalidate(Object notPartOfTheKey, @CacheKey Locale keyElement1, @CacheKey BigDecimal keyElement2) { } @CacheInvalidateAll(cacheName = CACHE_NAME) diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NoOpCacheInjectionTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NoOpCacheInjectionTest.java new file mode 100644 index 0000000000000..3d1278adf7835 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NoOpCacheInjectionTest.java @@ -0,0 +1,52 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheInvalidateAll; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.CacheName; +import io.quarkus.cache.runtime.noop.NoOpCache; +import io.quarkus.test.QuarkusUnitTest; + +public class NoOpCacheInjectionTest { + + private static final String CACHE_NAME = "test-cache"; + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource(new StringAsset("quarkus.cache.enabled=false"), "application.properties") + .addClass(CachedService.class)); + + @Inject + CachedService cachedService; + + @CacheName(CACHE_NAME) + Cache cache; + + @Inject + CacheManager cacheManager; + + @Test + public void testInjection() { + assertEquals(NoOpCache.class, cache.getClass()); + assertEquals(cache, cacheManager.getCache(CACHE_NAME).get()); + } + + @Singleton + static class CachedService { + + @CacheInvalidateAll(cacheName = CACHE_NAME) + public void invalidateAll() { + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NoOpCacheTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NoOpCacheTest.java new file mode 100644 index 0000000000000..af194e30d71e4 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NoOpCacheTest.java @@ -0,0 +1,112 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.IOException; + +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheInvalidate; +import io.quarkus.cache.CacheInvalidateAll; +import io.quarkus.cache.CacheResult; +import 
io.quarkus.test.QuarkusUnitTest; + +public class NoOpCacheTest { + + private static final String CACHE_NAME = "test-cache"; + private static final Object KEY = new Object(); + private static final String FORCED_EXCEPTION_MESSAGE = "Forced exception"; + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource(new StringAsset("quarkus.cache.enabled=false"), "application.properties") + .addClass(CachedService.class)); + + @Inject + CachedService cachedService; + + @Test + public void testAllCacheAnnotations() { + // STEP 1 + // Action: @CacheResult-annotated method call. + // Expected effect: method invoked and result not cached. + // Verified by: STEP 2. + String value1 = cachedService.cachedMethod(KEY); + + // STEP 2 + // Action: same call as STEP 1. + // Expected effect: method invoked and result not cached. + // Verified by: different objects references between STEPS 1 and 2 results. + String value2 = cachedService.cachedMethod(KEY); + assertTrue(value1 != value2); + + // The following methods have no effect at all, but let's check if they're running fine anyway. + cachedService.invalidate(KEY); + cachedService.invalidateAll(); + } + + @Test + public void testRuntimeExceptionThrowDuringCacheComputation() { + NumberFormatException e = assertThrows(NumberFormatException.class, () -> { + cachedService.throwRuntimeExceptionDuringCacheComputation(); + }); + assertEquals(FORCED_EXCEPTION_MESSAGE, e.getMessage()); + } + + @Test + public void testCheckedExceptionThrowDuringCacheComputation() { + IOException e = assertThrows(IOException.class, () -> { + cachedService.throwCheckedExceptionDuringCacheComputation(); + }); + assertEquals(FORCED_EXCEPTION_MESSAGE, e.getMessage()); + } + + @Test + public void testErrorThrowDuringCacheComputation() { + OutOfMemoryError e = assertThrows(OutOfMemoryError.class, () -> { + cachedService.throwErrorDuringCacheComputation(); + }); + assertEquals(FORCED_EXCEPTION_MESSAGE, e.getMessage()); + } + + @Singleton + static class CachedService { + + @CacheResult(cacheName = CACHE_NAME) + public String cachedMethod(Object key) { + return new String(); + } + + @CacheInvalidate(cacheName = CACHE_NAME) + public void invalidate(Object key) { + } + + @CacheInvalidateAll(cacheName = CACHE_NAME) + public void invalidateAll() { + } + + @CacheResult(cacheName = "runtime-exception-cache") + public String throwRuntimeExceptionDuringCacheComputation() { + throw new NumberFormatException(FORCED_EXCEPTION_MESSAGE); + } + + @CacheResult(cacheName = "checked-exception-cache") + public String throwCheckedExceptionDuringCacheComputation() throws IOException { + throw new IOException(FORCED_EXCEPTION_MESSAGE); + } + + @CacheResult(cacheName = "error-cache") + public String throwErrorDuringCacheComputation() { + throw new OutOfMemoryError(FORCED_EXCEPTION_MESSAGE); + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NullKeyOrValueCacheTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NullKeyOrValueCacheTest.java index 9dfd126094626..05047bc4e705b 100644 --- a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NullKeyOrValueCacheTest.java +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/NullKeyOrValueCacheTest.java @@ -1,6 +1,6 @@ package io.quarkus.cache.test.runtime; -import static io.quarkus.cache.runtime.CacheKeyBuilder.NULL_KEYS_NOT_SUPPORTED_MSG; +import 
static io.quarkus.cache.runtime.AbstractCache.NULL_KEYS_NOT_SUPPORTED_MSG; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ThrowExecutionExceptionCauseTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ThrowExecutionExceptionCauseTest.java new file mode 100644 index 0000000000000..0c4ddb1a0267a --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ThrowExecutionExceptionCauseTest.java @@ -0,0 +1,76 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.io.IOException; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheResult; +import io.quarkus.test.QuarkusUnitTest; + +public class ThrowExecutionExceptionCauseTest { + + private static final String FORCED_EXCEPTION_MESSAGE = "Forced exception"; + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(CachedService.class)); + + @Inject + CachedService cachedService; + + @Test + public void testRuntimeExceptionThrowDuringCacheComputation() { + NumberFormatException e = assertThrows(NumberFormatException.class, () -> { + cachedService.throwRuntimeExceptionDuringCacheComputation(); + }); + assertEquals(FORCED_EXCEPTION_MESSAGE, e.getMessage()); + // Let's check we didn't put an uncompleted future in the cache because of the previous exception. 
+ assertThrows(NumberFormatException.class, () -> { + cachedService.throwRuntimeExceptionDuringCacheComputation(); + }); + } + + @Test + public void testCheckedExceptionThrowDuringCacheComputation() { + IOException e = assertThrows(IOException.class, () -> { + cachedService.throwCheckedExceptionDuringCacheComputation(); + }); + assertEquals(FORCED_EXCEPTION_MESSAGE, e.getMessage()); + } + + @Test + public void testErrorThrowDuringCacheComputation() { + OutOfMemoryError e = assertThrows(OutOfMemoryError.class, () -> { + cachedService.throwErrorDuringCacheComputation(); + }); + assertEquals(FORCED_EXCEPTION_MESSAGE, e.getMessage()); + } + + @ApplicationScoped + static class CachedService { + + @CacheResult(cacheName = "runtime-exception-cache") + public String throwRuntimeExceptionDuringCacheComputation() { + throw new NumberFormatException(FORCED_EXCEPTION_MESSAGE); + } + + @CacheResult(cacheName = "checked-exception-cache") + public String throwCheckedExceptionDuringCacheComputation() throws IOException { + throw new IOException(FORCED_EXCEPTION_MESSAGE); + } + + @CacheResult(cacheName = "error-cache") + public String throwErrorDuringCacheComputation() { + throw new OutOfMemoryError(FORCED_EXCEPTION_MESSAGE); + } + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ZeroCaffeineCacheTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ZeroCaffeineCacheTest.java new file mode 100644 index 0000000000000..1d3894afa09d4 --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ZeroCaffeineCacheTest.java @@ -0,0 +1,33 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheManager; +import io.quarkus.test.QuarkusUnitTest; + +/** + * This test checks that no exception is thrown when a {@link CacheManager} method is called while the extension is enabled and + * no cache is declared in the application. 
+ */ +public class ZeroCaffeineCacheTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)); + + @Inject + CacheManager cacheManager; + + @Test + public void testNoExceptionThrown() { + assertTrue(cacheManager.getCacheNames().isEmpty()); + assertFalse(cacheManager.getCache("unknown-cache").isPresent()); + } +} diff --git a/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ZeroNoOpCacheTest.java b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ZeroNoOpCacheTest.java new file mode 100644 index 0000000000000..9d14808d9c77b --- /dev/null +++ b/extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ZeroNoOpCacheTest.java @@ -0,0 +1,35 @@ +package io.quarkus.cache.test.runtime; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import javax.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.cache.CacheManager; +import io.quarkus.test.QuarkusUnitTest; + +/** + * This test checks that no exception is thrown when a {@link CacheManager} method is called while the extension is disabled + * and no cache is declared in the application. + */ +public class ZeroNoOpCacheTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addAsResource(new StringAsset("quarkus.cache.enabled=false"), "application.properties")); + + @Inject + CacheManager cacheManager; + + @Test + public void testNoExceptionThrown() { + assertTrue(cacheManager.getCacheNames().isEmpty()); + assertFalse(cacheManager.getCache("unknown-cache").isPresent()); + } +} diff --git a/extensions/cache/pom.xml b/extensions/cache/pom.xml index 1b80933ca83db..65befebd45a1e 100644 --- a/extensions/cache/pom.xml +++ b/extensions/cache/pom.xml @@ -5,10 +5,10 @@ 4.0.0 - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml quarkus-cache-parent diff --git a/extensions/cache/runtime/pom.xml b/extensions/cache/runtime/pom.xml index d0bbb9cc44967..73dd9f19a3744 100644 --- a/extensions/cache/runtime/pom.xml +++ b/extensions/cache/runtime/pom.xml @@ -8,7 +8,6 @@ quarkus-cache-parent io.quarkus 999-SNAPSHOT - ../ quarkus-cache @@ -25,8 +24,14 @@ quarkus-caffeine - org.eclipse.microprofile.context-propagation - microprofile-context-propagation-api + io.vertx + vertx-web + true + + + io.quarkus + quarkus-vertx-http + true diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/Cache.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/Cache.java new file mode 100644 index 0000000000000..8faeb4ee1449a --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/Cache.java @@ -0,0 +1,13 @@ +package io.quarkus.cache; + +/** + * Use this interface to interact with a cache programmatically. The cache can be injected using the {@link CacheName} + * annotation or retrieved using {@link CacheManager}. + */ +public interface Cache { + + /* + * This interface is intentionally empty for now. + * TODO: Add the methods that will be exposed by the programmatic API. 
+ */ +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidate.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidate.java index 4f1a27f46e4e4..69a2f2c6ab4d8 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidate.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidate.java @@ -7,6 +7,7 @@ import java.lang.annotation.Target; import javax.enterprise.util.Nonbinding; +import javax.interceptor.InterceptorBinding; import io.quarkus.cache.CacheInvalidate.List; @@ -15,7 +16,7 @@ * remove an existing entry from the cache. If the method has one or more arguments, the key computation is done from all the * method arguments if none of them is annotated with {@link CacheKey}, or all the arguments annotated with {@link CacheKey} * otherwise. This annotation can also be used on a method with no arguments, a default key derived from the cache name is - * generated in that case. If the key does not identify any cache entry, nothing will happen. + * used in that case. If the key does not identify any cache entry, nothing will happen. *
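As an illustration of the key rules this Javadoc paragraph describes, here is a minimal sketch of a CDI bean combining the annotations added in this patch; the `PriceService` class, its method names and the `prices` cache name are hypothetical and only serve the example:

```java
import javax.enterprise.context.ApplicationScoped;

import io.quarkus.cache.CacheInvalidate;
import io.quarkus.cache.CacheInvalidateAll;
import io.quarkus.cache.CacheKey;
import io.quarkus.cache.CacheResult;

@ApplicationScoped
public class PriceService {

    // No parameter is annotated with @CacheKey, so the key is computed from
    // both arguments: (productId, currency).
    @CacheResult(cacheName = "prices")
    public String getPrice(String productId, String currency) {
        return productId + "/" + currency + "/" + System.nanoTime();
    }

    // Only the @CacheKey-annotated arguments are part of the key, in their
    // declaration order, so this removes the entry cached above for the same
    // (productId, currency) pair; `auditComment` is ignored.
    @CacheInvalidate(cacheName = "prices")
    public void invalidatePrice(@CacheKey String productId, @CacheKey String currency, String auditComment) {
    }

    // No argument: the default key derived from the cache name is used.
    @CacheInvalidate(cacheName = "prices")
    public void invalidateDefaultEntry() {
    }

    // Removes all entries from the cache, regardless of their keys.
    @CacheInvalidateAll(cacheName = "prices")
    public void invalidateAllPrices() {
    }
}
```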

    * This annotation can be combined with multiple other caching annotations on a single method. Caching operations will always * be executed in the same order: {@link CacheInvalidateAll} first, then {@link CacheInvalidate} and finally @@ -23,8 +24,9 @@ *

    * The underlying caching provider can be chosen and configured in the Quarkus {@link application.properties} file. */ +@InterceptorBinding +@Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) @Repeatable(List.class) public @interface CacheInvalidate { @@ -34,8 +36,8 @@ @Nonbinding String cacheName(); + @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.METHOD) @interface List { CacheInvalidate[] value(); } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidateAll.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidateAll.java index 51b166bf60f4f..62b8ebb808b6a 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidateAll.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheInvalidateAll.java @@ -7,6 +7,7 @@ import java.lang.annotation.Target; import javax.enterprise.util.Nonbinding; +import javax.interceptor.InterceptorBinding; import io.quarkus.cache.CacheInvalidateAll.List; @@ -19,8 +20,9 @@ *

    * The underlying caching provider can be chosen and configured in the Quarkus {@link application.properties} file. */ +@InterceptorBinding +@Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) @Repeatable(List.class) public @interface CacheInvalidateAll { @@ -30,8 +32,8 @@ @Nonbinding String cacheName(); + @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.METHOD) @interface List { CacheInvalidateAll[] value(); } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheManager.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheManager.java new file mode 100644 index 0000000000000..114d65b9345bc --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheManager.java @@ -0,0 +1,46 @@ +package io.quarkus.cache; + +import java.util.Collection; +import java.util.Optional; + +/** + *

    + * Use this interface to retrieve all existing {@link Cache} names and interact with any cache programmatically. It shares the + * same cache collection the Quarkus caching annotations use. The {@link CacheName} annotation can also be used to inject and + * access a specific cache from its name. + *
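Complementing the injection example already included in this Javadoc, a small sketch of how the two lookup methods could be used; the `CacheInspector` class is hypothetical, and the empty `Optional` for an unknown name follows the contract documented on `getCache()`:

```java
import java.util.Optional;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import io.quarkus.cache.Cache;
import io.quarkus.cache.CacheManager;

@ApplicationScoped
public class CacheInspector {

    @Inject
    CacheManager cacheManager;

    // Lists every cache known to the manager, i.e. the same collection the
    // caching annotations work against.
    public void printCacheNames() {
        for (String name : cacheManager.getCacheNames()) {
            System.out.println("cache: " + name);
        }
    }

    // getCache() returns an empty Optional when no cache matches the name.
    public Cache requireCache(String name) {
        Optional<Cache> cache = cacheManager.getCache(name);
        return cache.orElseThrow(() -> new IllegalArgumentException("Unknown cache: " + name));
    }
}
```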

    + *

    + * Code example: + * + *

    + * {@literal @}ApplicationScoped
    + * public class CachedService {
    + * 
    + *     {@literal @}Inject
    + *     CacheManager cacheManager;
    + *     
    + *     void doSomething() {
    + *         Cache cache = cacheManager.getCache("my-cache");
    + *         // Interact with the cache.
    + *     }
    + * }
    + * 
    + *

    + */ +public interface CacheManager { + + /** + * Gets a collection of all cache names. + * + * @return names of all caches + */ + Collection getCacheNames(); + + /** + * Gets the cache identified by the given name. + * + * @param name cache name + * @return an {@link Optional} containing the identified cache if it exists, or an empty {@link Optional} otherwise + */ + Optional getCache(String name); +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheName.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheName.java new file mode 100644 index 0000000000000..7a375c3b3aeaf --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheName.java @@ -0,0 +1,79 @@ +package io.quarkus.cache; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import javax.enterprise.util.Nonbinding; +import javax.inject.Qualifier; + +/** + *

    + * Use this annotation on a field, a constructor parameter or a method parameter to inject a {@link Cache} and interact with it + * programmatically. + *
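In addition to the injection examples this Javadoc already provides, a sketch of the point exercised by `CaffeineCacheInjectionTest` in this patch: a cache injected with `@CacheName` resolves to the same cache the `CacheManager` returns for that name. The `CacheHolder` bean and the `my-cache` name are hypothetical:

```java
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import io.quarkus.cache.Cache;
import io.quarkus.cache.CacheInvalidateAll;
import io.quarkus.cache.CacheManager;
import io.quarkus.cache.CacheName;

@ApplicationScoped
public class CacheHolder {

    // Injects the cache declared by the annotated method below.
    @CacheName("my-cache")
    Cache cache;

    @Inject
    CacheManager cacheManager;

    // A caching annotation somewhere in the application is what declares the
    // cache; an unknown cache name fails the build, as covered by
    // DeploymentExceptionsTest in this patch.
    @CacheInvalidateAll(cacheName = "my-cache")
    public void clear() {
    }

    public boolean sameUnderlyingCache() {
        // Mirrors the assertion in CaffeineCacheInjectionTest: the injected
        // cache and the one resolved by the CacheManager are equal.
        return cacheManager.getCache("my-cache").get().equals(cache);
    }
}
```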

    + *

    + * Field injection example: + * + *

    + * {@literal @}ApplicationScoped
    + * public class CachedService {
    + * 
    + *     {@literal @}CacheName("my-cache")
    + *     Cache cache;
    + *     
    + *     // Interact with the cache.
    + * }
    + * 
    + * + * Constructor parameter injection example: + * + *
    + * {@literal @}ApplicationScoped
    + * public class CachedService {
    + * 
    + *     private Cache cache;
    + * 
    + *     public CachedService(@CacheName("my-cache") Cache cache) {
    + *         this.cache = cache;
    + *     }
    + *     
    + *     // Interact with the cache.
    + * }
    + * 
    + * + * Method parameter injection example: + * + *
    + * {@literal @}ApplicationScoped
    + * public class CachedService {
    + * 
    + *     private Cache cache;
    + * 
    + *     {@literal @}Inject
    + *     public void setCache(@CacheName("my-cache") Cache cache) {
    + *         this.cache = cache;
    + *     }
    + *     
    + *     // Interact with the cache.
    + * }
    + * 
    + *

    + * + * @see CacheManager + */ +@Qualifier +@Target({ FIELD, METHOD, PARAMETER }) +@Retention(RUNTIME) +public @interface CacheName { + + /** + * The name of the cache. + */ + @Nonbinding + String value(); +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheResult.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheResult.java index 41047adb4bcb8..30357c2364941 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheResult.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/CacheResult.java @@ -6,15 +6,16 @@ import java.lang.annotation.Target; import javax.enterprise.util.Nonbinding; +import javax.interceptor.InterceptorBinding; /** * When a method annotated with {@link CacheResult} is invoked, Quarkus will compute a cache key and use it to check in the * cache whether the method has been already invoked. If the method has one or more arguments, the key computation is done from * all the method arguments if none of them is annotated with {@link CacheKey}, or all the arguments annotated with * {@link CacheKey} otherwise. This annotation can also be used on a method with no arguments, a default key derived from the - * cache name is generated in that case. If a value is found in the cache, it is returned and the annotated method is never - * actually executed. If no value is found, the annotated method is invoked and the returned value is stored in the cache using - * the computed or generated key. + * cache name is used in that case. If a value is found in the cache, it is returned and the annotated method is never actually + * executed. If no value is found, the annotated method is invoked and the returned value is stored in the cache using the + * computed key. *

    * A method annotated with {@link CacheResult} is protected by a lock on cache miss mechanism. If several concurrent * invocations try to retrieve a cache value from the same missing key, the method will only be invoked once. The first @@ -29,8 +30,9 @@ *
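A sketch of the lock-on-miss behaviour described in this paragraph, using the `lockTimeout` attribute exercised by `ConcurrencyTest` in this patch; the `WeatherService` class, its cache names and the 500 ms value are made up for illustration:

```java
import javax.enterprise.context.ApplicationScoped;

import io.quarkus.cache.CacheKey;
import io.quarkus.cache.CacheResult;

@ApplicationScoped
public class WeatherService {

    // Concurrent calls with the same key share a single invocation: the first
    // caller computes the value while the other callers wait for the cached result.
    @CacheResult(cacheName = "weather-reports")
    public String getReport(String city) {
        return expensiveForecast(city);
    }

    // Only `city` is part of the key; `correlationId` is ignored. If the lock
    // held by the first caller is not released within 500 ms, a waiting caller
    // stops waiting and computes the value itself, as shown by ConcurrencyTest.
    @CacheResult(cacheName = "detailed-weather-reports", lockTimeout = 500)
    public String getDetailedReport(@CacheKey String city, String correlationId) {
        return expensiveForecast(city) + " (detailed)";
    }

    private String expensiveForecast(String city) {
        return "forecast for " + city;
    }
}
```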

    * The underlying caching provider can be chosen and configured in the Quarkus {@link application.properties} file. */ +@InterceptorBinding +@Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) public @interface CacheResult { /** diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/AbstractCache.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/AbstractCache.java new file mode 100644 index 0000000000000..8545d93c1b95a --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/AbstractCache.java @@ -0,0 +1,35 @@ +package io.quarkus.cache.runtime; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; + +import io.quarkus.cache.Cache; + +public abstract class AbstractCache implements Cache { + + public static final String NULL_KEYS_NOT_SUPPORTED_MSG = "Null keys are not supported by the Quarkus application data cache"; + + private Object defaultKey; + + public abstract String getName(); + + /** + * Returns the unique and immutable default key for the current cache. This key is used by the annotations caching API when + * a no-args method annotated with {@link io.quarkus.cache.CacheResult CacheResult} or + * {@link io.quarkus.cache.CacheInvalidate CacheInvalidate} is invoked. + * + * @return default cache key + */ + public Object getDefaultKey() { + if (defaultKey == null) { + defaultKey = new DefaultCacheKey(getName()); + } + return defaultKey; + } + + public abstract CompletableFuture get(Object key, Function valueLoader); + + public abstract void invalidate(Object key); + + public abstract void invalidateAll(); +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheException.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheException.java new file mode 100644 index 0000000000000..4f1ae7daef547 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheException.java @@ -0,0 +1,10 @@ +package io.quarkus.cache.runtime; + +public class CacheException extends RuntimeException { + + private static final long serialVersionUID = -3465350968605912384L; + + public CacheException(Throwable cause) { + super(cause); + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptionContext.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptionContext.java new file mode 100644 index 0000000000000..06bfc80b8ebab --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptionContext.java @@ -0,0 +1,27 @@ +package io.quarkus.cache.runtime; + +import java.lang.annotation.Annotation; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +public class CacheInterceptionContext { + + private final List interceptorBindings; + private final List cacheKeyParameterPositions; + + public CacheInterceptionContext(List interceptorBindings, List cacheKeyParameterPositions) { + Objects.requireNonNull(interceptorBindings); + Objects.requireNonNull(cacheKeyParameterPositions); + this.interceptorBindings = Collections.unmodifiableList(interceptorBindings); + this.cacheKeyParameterPositions = Collections.unmodifiableList(cacheKeyParameterPositions); + } + + public List getInterceptorBindings() { + return interceptorBindings; + } + + public List getCacheKeyParameterPositions() { + return cacheKeyParameterPositions; + } +} diff --git 
a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptor.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptor.java index 6dbe8f668401e..7bf783ff3b23b 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptor.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInterceptor.java @@ -1,55 +1,132 @@ package io.quarkus.cache.runtime; import java.lang.annotation.Annotation; +import java.lang.reflect.Parameter; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.function.Supplier; import javax.inject.Inject; import javax.interceptor.Interceptor.Priority; import javax.interceptor.InvocationContext; +import org.jboss.logging.Logger; + import io.quarkus.arc.runtime.InterceptorBindings; +import io.quarkus.cache.CacheKey; +import io.quarkus.cache.CacheManager; public abstract class CacheInterceptor { public static final int BASE_PRIORITY = Priority.PLATFORM_BEFORE; + private static final Logger LOGGER = Logger.getLogger(CacheInterceptor.class); + private static final String PERFORMANCE_WARN_MSG = "Cache key resolution based on reflection calls. Please create a GitHub issue in the Quarkus repository, the maintainers might be able to improve your application performance."; + @Inject - CacheRepository cacheRepository; + CacheManager cacheManager; - @SuppressWarnings("unchecked") - protected List getInterceptorBindings(InvocationContext context, Class bindingClass) { - List bindings = new ArrayList<>(); - for (Annotation binding : InterceptorBindings.getInterceptorBindings(context)) { - if (bindingClass.isInstance(binding)) { - bindings.add((T) binding); + /* + * The interception is almost always managed by Arc in a Quarkus application. In such a case, we want to retrieve the + * interceptor bindings stored by Arc in the invocation context data (very good performance-wise). But sometimes the + * interception is managed by another CDI interceptors implementation. It can happen for example while using caching + * annotations on a MicroProfile REST Client method. In that case, we have no other choice but to rely on reflection (with + * underlying synchronized blocks which are bad for performances) to retrieve the interceptor bindings. + */ + protected CacheInterceptionContext getInterceptionContext(InvocationContext invocationContext, + Class interceptorBindingClass, boolean supportsCacheKey) { + return getArcCacheInterceptionContext(invocationContext, interceptorBindingClass) + .orElseGet(new Supplier>() { + @Override + public CacheInterceptionContext get() { + return getNonArcCacheInterceptionContext(invocationContext, interceptorBindingClass, supportsCacheKey); + } + }); + } + + private Optional> getArcCacheInterceptionContext( + InvocationContext invocationContext, Class interceptorBindingClass) { + Set bindings = InterceptorBindings.getInterceptorBindings(invocationContext); + if (bindings == null) { + // This should only happen when the interception is not managed by Arc. 
+ return Optional.empty(); + } + List interceptorBindings = new ArrayList<>(); + List cacheKeyParameterPositions = new ArrayList<>(); + for (Annotation binding : bindings) { + if (binding instanceof CacheKeyParameterPositions) { + for (short position : ((CacheKeyParameterPositions) binding).value()) { + cacheKeyParameterPositions.add(position); + } + } else if (interceptorBindingClass.isInstance(binding)) { + interceptorBindings.add(cast(binding, interceptorBindingClass)); } } - return bindings; + return Optional.of(new CacheInterceptionContext<>(interceptorBindings, cacheKeyParameterPositions)); } - protected T getInterceptorBinding(InvocationContext context, Class bindingClass) { - return getInterceptorBindings(context, bindingClass).get(0); + private CacheInterceptionContext getNonArcCacheInterceptionContext( + InvocationContext invocationContext, Class interceptorBindingClass, boolean supportsCacheKey) { + List interceptorBindings = new ArrayList<>(); + List cacheKeyParameterPositions = new ArrayList<>(); + boolean cacheKeyParameterPositionsFound = false; + for (Annotation annotation : invocationContext.getMethod().getAnnotations()) { + if (annotation instanceof CacheKeyParameterPositions) { + cacheKeyParameterPositionsFound = true; + for (short position : ((CacheKeyParameterPositions) annotation).value()) { + cacheKeyParameterPositions.add(position); + } + } else if (interceptorBindingClass.isInstance(annotation)) { + interceptorBindings.add(cast(annotation, interceptorBindingClass)); + } + } + if (supportsCacheKey && !cacheKeyParameterPositionsFound) { + /* + * This block is a fallback that should ideally never be executed because of the poor performance of reflection + * calls. If the following warn message is displayed, then it means that we should update the build time bytecode + * generation to cover the missing case. See RestClientMethodEnhancer for more details. + */ + LOGGER.warn(PERFORMANCE_WARN_MSG); + Parameter[] parameters = invocationContext.getMethod().getParameters(); + for (short i = 0; i < parameters.length; i++) { + if (parameters[i].isAnnotationPresent(CacheKey.class)) { + cacheKeyParameterPositions.add(i); + } + } + } + return new CacheInterceptionContext<>(interceptorBindings, cacheKeyParameterPositions); } - protected Object buildCacheKey(String cacheName, short[] cacheKeyParameterPositions, Object[] methodParameterValues) { - // If the method doesn't have any parameter, then a unique default key is generated and used. - if (methodParameterValues.length == 0) { - return CacheKeyBuilder.buildDefault(cacheName); - } else { + @SuppressWarnings("unchecked") + private T cast(Annotation annotation, Class interceptorBindingClass) { + return (T) annotation; + } + + protected Object getCacheKey(AbstractCache cache, List cacheKeyParameterPositions, Object[] methodParameterValues) { + if (methodParameterValues == null || methodParameterValues.length == 0) { + // If the intercepted method doesn't have any parameter, then the default cache key will be used. + return cache.getDefaultKey(); + } else if (cacheKeyParameterPositions.size() == 1) { + // If exactly one @CacheKey-annotated parameter was identified for the intercepted method at build time, then this + // parameter will be used as the cache key. 
+ return methodParameterValues[cacheKeyParameterPositions.get(0)]; + } else if (cacheKeyParameterPositions.size() >= 2) { + // If two or more @CacheKey-annotated parameters were identified for the intercepted method at build time, then a + // composite cache key built from all these parameters will be used. List keyElements = new ArrayList<>(); - // If at least one of the method parameters is annotated with @CacheKey, then the key is composed of all - // @CacheKey-annotated parameters that were identified at build time. - if (cacheKeyParameterPositions.length > 0) { - for (int i = 0; i < cacheKeyParameterPositions.length; i++) { - keyElements.add(methodParameterValues[i]); - } - } else { - // Otherwise, the key is composed of all of the method parameters. - keyElements.addAll(Arrays.asList(methodParameterValues)); + for (short position : cacheKeyParameterPositions) { + keyElements.add(methodParameterValues[position]); } - return CacheKeyBuilder.build(keyElements); + return new CompositeCacheKey(keyElements.toArray(new Object[0])); + } else if (methodParameterValues.length == 1) { + // If the intercepted method has exactly one parameter, then this parameter will be used as the cache key. + return methodParameterValues[0]; + } else { + // If the intercepted method has two or more parameters, then a composite cache key built from all these parameters + // will be used. + return new CompositeCacheKey(methodParameterValues); } } } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptor.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptor.java index 21add43b4e6f2..24ba3401c83c5 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptor.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptor.java @@ -7,25 +7,32 @@ import org.jboss.logging.Logger; -import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.cache.CacheInvalidateAll; -@CacheInvalidateAllInterceptorBinding +@CacheInvalidateAll(cacheName = "") // The `cacheName` attribute is @Nonbinding. @Interceptor @Priority(CacheInterceptor.BASE_PRIORITY) public class CacheInvalidateAllInterceptor extends CacheInterceptor { private static final Logger LOGGER = Logger.getLogger(CacheInvalidateAllInterceptor.class); + private static final String INTERCEPTOR_BINDINGS_ERROR_MSG = "The Quarkus cache extension is not working properly (CacheInvalidateAll interceptor bindings retrieval failed), please create a GitHub issue in the Quarkus repository to help the maintainers fix this bug"; @AroundInvoke - public Object intercept(InvocationContext context) throws Exception { - for (CacheInvalidateAllInterceptorBinding binding : getInterceptorBindings(context, - CacheInvalidateAllInterceptorBinding.class)) { - CaffeineCache cache = cacheRepository.getCache(binding.cacheName()); - if (LOGGER.isDebugEnabled()) { - LOGGER.debugf("Invalidating all entries from cache [%s]", cache.getName()); + public Object intercept(InvocationContext invocationContext) throws Exception { + CacheInterceptionContext interceptionContext = getInterceptionContext(invocationContext, + CacheInvalidateAll.class, false); + if (interceptionContext.getInterceptorBindings().isEmpty()) { + // This should never happen. 
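To make the key-resolution branches of `getCacheKey` above concrete, here is a hedged sketch of an application bean; the bean, cache name and method bodies are invented for illustration and follow directly from those branches.

```java
import javax.enterprise.context.ApplicationScoped;

import io.quarkus.cache.CacheInvalidate;
import io.quarkus.cache.CacheKey;
import io.quarkus.cache.CacheResult;

@ApplicationScoped
public class ForecastService { // illustrative bean, not part of the extension

    // No parameter: the default cache key derived from the cache name is used.
    @CacheResult(cacheName = "forecast")
    public String worldwideForecast() {
        return expensiveCall("world");
    }

    // Exactly one parameter and no @CacheKey annotation: that parameter is the key.
    @CacheResult(cacheName = "forecast")
    public String forecast(String city) {
        return expensiveCall(city);
    }

    // Two @CacheKey-annotated parameters: a composite key is built from them;
    // the remaining parameter is ignored for key resolution.
    @CacheInvalidate(cacheName = "forecast")
    public void refresh(@CacheKey String city, @CacheKey String day, boolean verbose) {
    }

    private String expensiveCall(String input) {
        return "forecast for " + input;
    }
}
```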
+ LOGGER.warn(INTERCEPTOR_BINDINGS_ERROR_MSG); + } else { + for (CacheInvalidateAll binding : interceptionContext.getInterceptorBindings()) { + AbstractCache cache = (AbstractCache) cacheManager.getCache(binding.cacheName()).get(); + if (LOGGER.isDebugEnabled()) { + LOGGER.debugf("Invalidating all entries from cache [%s]", binding.cacheName()); + } + cache.invalidateAll(); } - cache.invalidateAll(); } - return context.proceed(); + return invocationContext.proceed(); } } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptorBinding.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptorBinding.java deleted file mode 100644 index e98dd8e96a5cd..0000000000000 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateAllInterceptorBinding.java +++ /dev/null @@ -1,28 +0,0 @@ -package io.quarkus.cache.runtime; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Repeatable; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.enterprise.util.Nonbinding; -import javax.interceptor.InterceptorBinding; - -import io.quarkus.cache.runtime.CacheInvalidateAllInterceptorBinding.List; - -@InterceptorBinding -@Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.TYPE, ElementType.METHOD }) -@Repeatable(List.class) -public @interface CacheInvalidateAllInterceptorBinding { - - @Nonbinding - String cacheName() default ""; - - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.METHOD) - @interface List { - CacheInvalidateAllInterceptorBinding[] value(); - } -} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptor.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptor.java index d55b8567b70d3..0cb72b16ae780 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptor.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptor.java @@ -7,29 +7,37 @@ import org.jboss.logging.Logger; -import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.cache.CacheInvalidate; -@CacheInvalidateInterceptorBinding +@CacheInvalidate(cacheName = "") // The `cacheName` attribute is @Nonbinding. 
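Since the interceptor above iterates over every `@CacheInvalidateAll` binding found on the intercepted method, a single call can clear several caches. A minimal sketch, assuming the public annotation is repeatable (which the loop over bindings suggests); the bean and cache names are invented.

```java
import javax.enterprise.context.ApplicationScoped;

import io.quarkus.cache.CacheInvalidateAll;

@ApplicationScoped
public class CacheAdminService { // illustrative bean

    // Each binding is processed in turn, so both caches are emptied by one call.
    @CacheInvalidateAll(cacheName = "forecast")
    @CacheInvalidateAll(cacheName = "geocoding")
    public void clearWeatherCaches() {
    }
}
```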
@Interceptor @Priority(CacheInterceptor.BASE_PRIORITY + 1) public class CacheInvalidateInterceptor extends CacheInterceptor { private static final Logger LOGGER = Logger.getLogger(CacheInvalidateInterceptor.class); + private static final String INTERCEPTOR_BINDINGS_ERROR_MSG = "The Quarkus cache extension is not working properly (CacheInvalidate interceptor bindings retrieval failed), please create a GitHub issue in the Quarkus repository to help the maintainers fix this bug"; @AroundInvoke - public Object intercept(InvocationContext context) throws Exception { - Object key = null; - for (CacheInvalidateInterceptorBinding binding : getInterceptorBindings(context, - CacheInvalidateInterceptorBinding.class)) { - if (key == null) { - key = buildCacheKey(binding.cacheName(), binding.cacheKeyParameterPositions(), context.getParameters()); + public Object intercept(InvocationContext invocationContext) throws Exception { + CacheInterceptionContext interceptionContext = getInterceptionContext(invocationContext, + CacheInvalidate.class, true); + if (interceptionContext.getInterceptorBindings().isEmpty()) { + // This should never happen. + LOGGER.warn(INTERCEPTOR_BINDINGS_ERROR_MSG); + } else { + Object key = null; + for (CacheInvalidate binding : interceptionContext.getInterceptorBindings()) { + AbstractCache cache = (AbstractCache) cacheManager.getCache(binding.cacheName()).get(); + if (key == null) { + key = getCacheKey(cache, interceptionContext.getCacheKeyParameterPositions(), + invocationContext.getParameters()); + } + if (LOGGER.isDebugEnabled()) { + LOGGER.debugf("Invalidating entry with key [%s] from cache [%s]", key, binding.cacheName()); + } + cache.invalidate(key); } - CaffeineCache cache = cacheRepository.getCache(binding.cacheName()); - if (LOGGER.isDebugEnabled()) { - LOGGER.debugf("Invalidating entry with key [%s] from cache [%s]", key, cache.getName()); - } - cache.invalidate(key); } - return context.proceed(); + return invocationContext.proceed(); } } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptorBinding.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptorBinding.java deleted file mode 100644 index 94da6f15a5410..0000000000000 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheInvalidateInterceptorBinding.java +++ /dev/null @@ -1,31 +0,0 @@ -package io.quarkus.cache.runtime; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Repeatable; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.enterprise.util.Nonbinding; -import javax.interceptor.InterceptorBinding; - -import io.quarkus.cache.runtime.CacheInvalidateInterceptorBinding.List; - -@InterceptorBinding -@Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.TYPE, ElementType.METHOD }) -@Repeatable(List.class) -public @interface CacheInvalidateInterceptorBinding { - - @Nonbinding - String cacheName() default ""; - - @Nonbinding - short[] cacheKeyParameterPositions() default {}; - - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.METHOD) - @interface List { - CacheInvalidateInterceptorBinding[] value(); - } -} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheKeyBuilder.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheKeyBuilder.java deleted file mode 100644 index 95b4eb63dca81..0000000000000 --- 
a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheKeyBuilder.java +++ /dev/null @@ -1,93 +0,0 @@ -package io.quarkus.cache.runtime; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public class CacheKeyBuilder { - - public static final String NULL_KEYS_NOT_SUPPORTED_MSG = "Null keys are not supported by the Quarkus application data cache"; - - /** - * Builds a default immutable and unique cache key from a cache name. This key is intended to be used for no-args methods - * annotated with {@link io.quarkus.cache.CacheResult CacheResult} or {@link io.quarkus.cache.CacheInvalidate - * CacheInvalidate}. - * - * @param cacheName cache name - * @return default immutable and unique cache key - */ - public static Object buildDefault(String cacheName) { - return new DefaultCacheKey(cacheName); - } - - /** - * Builds a cache key from a list of key elements. The list must be non-null and contain at least one key element. - * - * @param keyElements key elements - * @return cache key - */ - public static Object build(List keyElements) { - if (keyElements == null || keyElements.isEmpty()) { - throw new IllegalArgumentException("At least one key element is required to build a cache key"); - } else if (keyElements.size() == 1) { - if (keyElements.get(0) == null) { - throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); - } - return keyElements.get(0); - } else { - return new CompositeCacheKey(keyElements); - } - } - - private static class DefaultCacheKey { - - private final String cacheName; - - public DefaultCacheKey(String cacheName) { - this.cacheName = cacheName; - } - - @Override - public int hashCode() { - return Objects.hash(cacheName); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (obj instanceof DefaultCacheKey) { - DefaultCacheKey other = (DefaultCacheKey) obj; - return Objects.equals(cacheName, other.cacheName); - } - return false; - } - } - - private static class CompositeCacheKey { - - private final Object[] keyElements; - - public CompositeCacheKey(List keyElements) { - this.keyElements = keyElements.toArray(new Object[0]); - } - - @Override - public int hashCode() { - return Arrays.deepHashCode(keyElements); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (CompositeCacheKey.class.isInstance(obj)) { - final CompositeCacheKey other = (CompositeCacheKey) obj; - return Arrays.deepEquals(keyElements, other.keyElements); - } - return false; - } - } -} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheKeyParameterPositions.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheKeyParameterPositions.java new file mode 100644 index 0000000000000..a3e48e3668c9b --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheKeyParameterPositions.java @@ -0,0 +1,29 @@ +package io.quarkus.cache.runtime; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import javax.enterprise.util.Nonbinding; +import javax.interceptor.InterceptorBinding; + +/** + * This interceptor binding is added at build time on a method if: + *
+ * <ul>
+ * <li>it is annotated with {@link io.quarkus.cache.CacheResult CacheResult} or {@link io.quarkus.cache.CacheInvalidate
+ * CacheInvalidate}</li>
+ * <li>at least one of its arguments is annotated with {@link io.quarkus.cache.CacheKey CacheKey}</li>
+ * </ul>
    + * It helps improving performances by storing at build time the positions of {@link io.quarkus.cache.CacheKey + * CacheKey}-annotated arguments instead of relying on reflection at run time (which is bad for performances) to identify these + * positions. + */ +@InterceptorBinding +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +public @interface CacheKeyParameterPositions { + + @Nonbinding + short[] value() default {}; +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerImpl.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerImpl.java new file mode 100644 index 0000000000000..f1c3b0b491267 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerImpl.java @@ -0,0 +1,38 @@ +package io.quarkus.cache.runtime; + +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheManager; + +/** + * This class is registered as an @ApplicationScoped synthetic bean at build time. + */ +public class CacheManagerImpl implements CacheManager { + + private final Map caches; + private final Set cacheNames; + + public CacheManagerImpl(Map caches) { + Objects.requireNonNull(caches); + this.caches = Collections.unmodifiableMap(caches); + cacheNames = Collections.unmodifiableSet(caches.keySet()); + } + + @Override + public Set getCacheNames() { + return cacheNames; + } + + @Override + public Optional getCache(String name) { + if (name == null) { + return Optional.empty(); + } + return Optional.ofNullable(caches.get(name)); + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerInitializer.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerInitializer.java new file mode 100644 index 0000000000000..b2b9f3d275c6a --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerInitializer.java @@ -0,0 +1,17 @@ +package io.quarkus.cache.runtime; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.context.Initialized; +import javax.enterprise.event.Observes; + +import io.quarkus.cache.CacheManager; + +/** + * This class is used to eagerly create the {@link CacheManager} bean instance at STATIC_INIT execution time. + */ +public class CacheManagerInitializer { + + private static void onStaticInit(@Observes @Initialized(ApplicationScoped.class) Object event, CacheManager cacheManager) { + cacheManager.getCacheNames(); + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheProducer.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheProducer.java new file mode 100644 index 0000000000000..a8d1cffc7e54f --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheProducer.java @@ -0,0 +1,31 @@ +package io.quarkus.cache.runtime; + +import java.lang.annotation.Annotation; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.inject.Produces; +import javax.enterprise.inject.spi.InjectionPoint; +import javax.inject.Inject; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.CacheName; + +@ApplicationScoped +public class CacheProducer { + + @Inject + CacheManager cacheManager; + + @Produces + @CacheName("") // The `value` attribute is @Nonbinding. 
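As a usage sketch of the programmatic side shown above: the producer lets a `Cache` be injected by name, and `CacheManager` exposes lookups by name. The bean and cache name below are invented, and the `Collection` return type mirrors how `CaffeineCacheSupplier` consumes `getCacheNames()` further below.

```java
import java.util.Collection;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import io.quarkus.cache.Cache;
import io.quarkus.cache.CacheManager;
import io.quarkus.cache.CacheName;

@ApplicationScoped
public class CacheInspector { // illustrative bean

    // Resolved by the CacheProducer above; "forecast" is an invented cache name.
    @Inject
    @CacheName("forecast")
    Cache forecastCache;

    @Inject
    CacheManager cacheManager;

    public Collection<String> availableCaches() {
        // All caches are created at build time, so this collection is fixed at runtime.
        return cacheManager.getCacheNames();
    }

    public boolean hasForecastCache() {
        // An unknown name yields Optional.empty() rather than an exception.
        return cacheManager.getCache("forecast").isPresent();
    }
}
```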
+ Cache produce(InjectionPoint injectionPoint) { + for (Annotation qualifier : injectionPoint.getQualifiers()) { + if (qualifier instanceof CacheName) { + return cacheManager.getCache(((CacheName) qualifier).value()).get(); + } + } + // This will never be returned. + return null; + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheRepository.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheRepository.java deleted file mode 100644 index 87b1d5df4d44c..0000000000000 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheRepository.java +++ /dev/null @@ -1,26 +0,0 @@ -package io.quarkus.cache.runtime; - -import java.util.Collections; -import java.util.Map; - -import javax.enterprise.context.ApplicationScoped; - -import io.quarkus.cache.runtime.caffeine.CaffeineCache; - -@ApplicationScoped -public class CacheRepository { - - // There's no need for concurrency here since the map is created at build time and never modified after that. - private Map caches; - - public void setCaches(Map caches) { - if (this.caches != null) { - throw new IllegalStateException("The caches map must only be set at build time"); - } - this.caches = Collections.unmodifiableMap(caches); - } - - public CaffeineCache getCache(String cacheName) { - return caches.get(cacheName); - } -} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptor.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptor.java index 424b6581f4526..60d19de8fd61d 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptor.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptor.java @@ -1,5 +1,11 @@ package io.quarkus.cache.runtime; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Function; + import javax.annotation.Priority; import javax.interceptor.AroundInvoke; import javax.interceptor.Interceptor; @@ -7,23 +13,84 @@ import org.jboss.logging.Logger; -import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.cache.CacheResult; -@CacheResultInterceptorBinding +@CacheResult(cacheName = "") // The `cacheName` attribute is @Nonbinding. 
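The `CacheResultInterceptor` below honors the `lockTimeout()` attribute of `@CacheResult` and unwraps exceptions thrown by the cached method. A hedged usage sketch follows; the bean, cache name and timeout value are invented, and the default of 0 is taken from the previous binding's `lockTimeout() default 0`.

```java
import javax.enterprise.context.ApplicationScoped;

import io.quarkus.cache.CacheResult;

@ApplicationScoped
public class ReportService { // illustrative bean

    // If another thread is already computing the value for the same key and that
    // computation takes longer than 500 ms, the caller stops waiting, invokes the
    // method directly and does not cache the result (the TimeoutException branch
    // below). With the default lockTimeout of 0, callers wait for the computation.
    @CacheResult(cacheName = "reports", lockTimeout = 500)
    public String generateReport(String id) {
        // Exceptions thrown here reach the caller as-is: the interceptor below
        // unwraps the ExecutionException/CacheException layers around them.
        return "report-" + id;
    }
}
```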
@Interceptor @Priority(CacheInterceptor.BASE_PRIORITY + 2) public class CacheResultInterceptor extends CacheInterceptor { private static final Logger LOGGER = Logger.getLogger(CacheResultInterceptor.class); + private static final String INTERCEPTOR_BINDING_ERROR_MSG = "The Quarkus cache extension is not working properly (CacheResult interceptor binding retrieval failed), please create a GitHub issue in the Quarkus repository to help the maintainers fix this bug"; @AroundInvoke - public Object intercept(InvocationContext context) throws Exception { - CacheResultInterceptorBinding binding = getInterceptorBinding(context, CacheResultInterceptorBinding.class); - Object key = buildCacheKey(binding.cacheName(), binding.cacheKeyParameterPositions(), context.getParameters()); - CaffeineCache cache = cacheRepository.getCache(binding.cacheName()); + public Object intercept(InvocationContext invocationContext) throws Throwable { + CacheInterceptionContext interceptionContext = getInterceptionContext(invocationContext, + CacheResult.class, true); + + if (interceptionContext.getInterceptorBindings().isEmpty()) { + // This should never happen. + LOGGER.warn(INTERCEPTOR_BINDING_ERROR_MSG); + return invocationContext.proceed(); + } + + CacheResult binding = interceptionContext.getInterceptorBindings().get(0); + AbstractCache cache = (AbstractCache) cacheManager.getCache(binding.cacheName()).get(); + Object key = getCacheKey(cache, interceptionContext.getCacheKeyParameterPositions(), invocationContext.getParameters()); if (LOGGER.isDebugEnabled()) { - LOGGER.debugf("Loading entry with key [%s] from cache [%s]", key, cache.getName()); + LOGGER.debugf("Loading entry with key [%s] from cache [%s]", key, binding.cacheName()); + } + + try { + + CompletableFuture cacheValue = cache.get(key, new Function() { + @Override + public Object apply(Object k) { + try { + return invocationContext.proceed(); + } catch (Exception e) { + throw new CacheException(e); + } + } + }); + + if (binding.lockTimeout() <= 0) { + return cacheValue.get(); + } else { + try { + /* + * If the current thread started the cache value computation, then the computation is already finished since + * it was done synchronously and the following call will never time out. + */ + return cacheValue.get(binding.lockTimeout(), TimeUnit.MILLISECONDS); + } catch (TimeoutException e) { + // TODO: Add statistics here to monitor the timeout. + return invocationContext.proceed(); + } + } + + } catch (ExecutionException e) { + /* + * Any exception raised during a cache computation will be encapsulated into an ExecutionException because it is + * thrown during a CompletionStage execution. + */ + if (e.getCause() instanceof CacheException) { + /* + * The ExecutionException was caused by a CacheException (most likely case). + * Let's throw the CacheException cause if possible or the CacheException itself otherwise. + */ + if (e.getCause().getCause() != null) { + throw e.getCause().getCause(); + } else { + throw e.getCause(); + } + } else if (e.getCause() != null) { + // The ExecutionException was caused by another type of Throwable (unlikely case). + throw e.getCause(); + } else { + // The ExecutionException does not have a cause (very unlikely case). 
+ throw e; + } } - return cache.get(key, () -> context.proceed(), binding.lockTimeout()); } } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptorBinding.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptorBinding.java deleted file mode 100644 index d091e9b6b5248..0000000000000 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheResultInterceptorBinding.java +++ /dev/null @@ -1,24 +0,0 @@ -package io.quarkus.cache.runtime; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.enterprise.util.Nonbinding; -import javax.interceptor.InterceptorBinding; - -@InterceptorBinding -@Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.TYPE, ElementType.METHOD }) -public @interface CacheResultInterceptorBinding { - - @Nonbinding - String cacheName() default ""; - - @Nonbinding - short[] cacheKeyParameterPositions() default {}; - - @Nonbinding - long lockTimeout() default 0; -} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CaffeineCacheSupplier.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CaffeineCacheSupplier.java new file mode 100644 index 0000000000000..29ecd2a4b5570 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CaffeineCacheSupplier.java @@ -0,0 +1,35 @@ +package io.quarkus.cache.runtime; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Optional; +import java.util.function.Supplier; + +import io.quarkus.arc.Arc; +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.runtime.caffeine.CaffeineCache; + +public class CaffeineCacheSupplier implements Supplier> { + + @Override + public List get() { + CacheManager cacheManager = cacheManager(); + Collection names = cacheManager.getCacheNames(); + List allCaches = new ArrayList<>(names.size()); + for (String name : names) { + Optional cache = cacheManager.getCache(name); + if (cache.isPresent() && cache.get() instanceof CaffeineCache) { + allCaches.add((CaffeineCache) cache.get()); + } + } + allCaches.sort(Comparator.comparing(CaffeineCache::getName)); + return allCaches; + } + + public static CacheManager cacheManager() { + return Arc.container().instance(CacheManager.class).get(); + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CompositeCacheKey.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CompositeCacheKey.java new file mode 100644 index 0000000000000..41fe0b88a0d81 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CompositeCacheKey.java @@ -0,0 +1,45 @@ +package io.quarkus.cache.runtime; + +import java.util.Arrays; + +/** + * A composite cache key is used by the annotations caching API when a method annotated with + * {@link io.quarkus.cache.CacheResult CacheResult} or {@link io.quarkus.cache.CacheInvalidate CacheInvalidate} is invoked and + * when the cache key is composed of several of the method arguments (annotated with {@link io.quarkus.cache.CacheKey CacheKey} + * or not). + */ +public class CompositeCacheKey { + + private final Object[] keyElements; + + /** + * Constructor. 
+ * + * @param keyElements key elements + * @throws IllegalArgumentException if no key elements are provided + */ + public CompositeCacheKey(Object... keyElements) { + if (keyElements.length == 0) { + throw new IllegalArgumentException( + "At least one key element is required to create a composite cache key instance"); + } + this.keyElements = keyElements; + } + + @Override + public int hashCode() { + return Arrays.deepHashCode(keyElements); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (CompositeCacheKey.class.isInstance(obj)) { + final CompositeCacheKey other = (CompositeCacheKey) obj; + return Arrays.deepEquals(keyElements, other.keyElements); + } + return false; + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/DefaultCacheKey.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/DefaultCacheKey.java new file mode 100644 index 0000000000000..54181cffb8aa7 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/DefaultCacheKey.java @@ -0,0 +1,39 @@ +package io.quarkus.cache.runtime; + +import java.util.Objects; + +/** + * A default cache key is used by the annotations caching API when a no-args method annotated with + * {@link io.quarkus.cache.CacheResult CacheResult} or {@link io.quarkus.cache.CacheInvalidate CacheInvalidate} is invoked. + */ +public class DefaultCacheKey { + + private final String cacheName; + + /** + * Constructor. + * + * @param cacheName cache name + * @throws NullPointerException if the cache name is {@code null} + */ + public DefaultCacheKey(String cacheName) { + this.cacheName = Objects.requireNonNull(cacheName); + } + + @Override + public int hashCode() { + return Objects.hash(cacheName); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj instanceof DefaultCacheKey) { + DefaultCacheKey other = (DefaultCacheKey) obj; + return Objects.equals(cacheName, other.cacheName); + } + return false; + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCache.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCache.java index 3ef488ac1c80b..e4e2ba8aa11d3 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCache.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCache.java @@ -1,20 +1,21 @@ package io.quarkus.cache.runtime.caffeine; -import static io.quarkus.cache.runtime.NullValueConverter.fromCacheValue; -import static io.quarkus.cache.runtime.NullValueConverter.toCacheValue; - import java.time.Duration; -import java.util.concurrent.Callable; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Supplier; +import java.util.function.Function; import com.github.benmanes.caffeine.cache.AsyncCache; import com.github.benmanes.caffeine.cache.Caffeine; -public class CaffeineCache { +import io.quarkus.cache.runtime.AbstractCache; +import io.quarkus.cache.runtime.CacheException; +import io.quarkus.cache.runtime.NullValueConverter; + +/** + * This class is an internal Quarkus cache implementation. Do not use it explicitly from your Quarkus application. The public + * methods signatures may change without prior notice. 
+ */ +public class CaffeineCache extends AbstractCache { private AsyncCache cache; @@ -28,12 +29,9 @@ public class CaffeineCache { private Duration expireAfterAccess; - public CaffeineCache(CaffeineCacheInfo cacheInfo, Executor executor) { + public CaffeineCache(CaffeineCacheInfo cacheInfo) { this.name = cacheInfo.name; Caffeine builder = Caffeine.newBuilder(); - if (executor != null) { - builder.executor(executor); - } if (cacheInfo.initialCapacity != null) { this.initialCapacity = cacheInfo.initialCapacity; builder.initialCapacity(cacheInfo.initialCapacity); @@ -53,55 +51,74 @@ public CaffeineCache(CaffeineCacheInfo cacheInfo, Executor executor) { cache = builder.buildAsync(); } - public Object get(Object key, Callable valueLoader, long lockTimeout) throws Exception { - if (lockTimeout <= 0) { - return fromCacheValue(cache.synchronous().get(key, k -> new MappingSupplier(valueLoader).get())); - } - - // The lock timeout logic starts here. - - /* - * If the current key is not already associated with a value in the Caffeine cache, there's no way to know if the - * current thread or another one started the missing value computation. The following variable will be used to - * determine whether or not a timeout should be triggered during the computation depending on which thread started it. - */ - boolean[] isCurrentThreadComputation = { false }; - - CompletableFuture future = cache.get(key, (k, executor) -> { - isCurrentThreadComputation[0] = true; - return CompletableFuture.supplyAsync(new MappingSupplier(valueLoader), executor); - }); + @Override + public String getName() { + return name; + } - if (isCurrentThreadComputation[0]) { - // The value is missing and its computation was started from the current thread. - // We'll wait for the result no matter how long it takes. - return fromCacheValue(future.get()); - } else { - // The value is either already present in the cache or missing and its computation was started from another thread. - // We want to retrieve it from the cache within the lock timeout delay. + /** + * Returns a {@link CompletableFuture} holding the cache value identified by {@code key}, obtaining that value from + * {@code valueLoader} if necessary. The value computation is done synchronously on the calling thread and the + * {@link CompletableFuture} is immediately completed before being returned. + * + * @param key cache key + * @param valueLoader function used to compute the cache value if {@code key} is not already associated with a value + * @return a {@link CompletableFuture} holding the cache value + * @throws CacheException if an exception is thrown during the cache value computation + */ + @Override + public CompletableFuture get(Object key, Function valueLoader) { + if (key == null) { + throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); + } + CompletableFuture newCacheValue = new CompletableFuture(); + CompletableFuture existingCacheValue = cache.asMap().putIfAbsent(key, newCacheValue); + if (existingCacheValue == null) { try { - return fromCacheValue(future.get(lockTimeout, TimeUnit.MILLISECONDS)); - } catch (TimeoutException e) { - // Timeout triggered! We don't want to wait any longer for the value computation and we'll simply invoke the - // cached method and return its result without caching it. - // TODO: Add statistics here to monitor the timeout. 
- return valueLoader.call(); + Object value = valueLoader.apply(key); + newCacheValue.complete(NullValueConverter.toCacheValue(value)); + } catch (Throwable t) { + cache.asMap().remove(key, newCacheValue); + newCacheValue.complete(new CaffeineComputationThrowable(t)); } + return unwrapCacheValueOrThrowable(newCacheValue); + } else { + return unwrapCacheValueOrThrowable(existingCacheValue); } } + private CompletableFuture unwrapCacheValueOrThrowable(CompletableFuture cacheValue) { + return cacheValue.thenApply(new Function() { + @Override + public Object apply(Object value) { + // If there's a throwable encapsulated into a CaffeineComputationThrowable, it must be rethrown. + if (value instanceof CaffeineComputationThrowable) { + Throwable cause = ((CaffeineComputationThrowable) value).getCause(); + if (cause instanceof RuntimeException) { + throw (RuntimeException) cause; + } else { + throw new CacheException(cause); + } + } else { + return NullValueConverter.fromCacheValue(value); + } + } + }); + } + + @Override public void invalidate(Object key) { + if (key == null) { + throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); + } cache.synchronous().invalidate(key); } + @Override public void invalidateAll() { cache.synchronous().invalidateAll(); } - public String getName() { - return name; - } - // For testing purposes only. public Integer getInitialCapacity() { return initialCapacity; @@ -122,23 +139,7 @@ public Duration getExpireAfterAccess() { return expireAfterAccess; } - private static class MappingSupplier implements Supplier { - - private final Callable valueLoader; - - public MappingSupplier(Callable valueLoader) { - this.valueLoader = valueLoader; - } - - @Override - public Object get() { - try { - return toCacheValue(valueLoader.call()); - } catch (RuntimeException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(e); - } - } + public long getSize() { + return cache.synchronous().estimatedSize(); } } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCacheBuildRecorder.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCacheBuildRecorder.java index 3c06dc89e71bd..15e259a9d5b60 100644 --- a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCacheBuildRecorder.java +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineCacheBuildRecorder.java @@ -1,15 +1,17 @@ package io.quarkus.cache.runtime.caffeine; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.function.Supplier; -import org.eclipse.microprofile.context.ManagedExecutor; import org.jboss.logging.Logger; -import io.quarkus.arc.Arc; -import io.quarkus.arc.runtime.BeanContainer; -import io.quarkus.cache.runtime.CacheRepository; +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.runtime.CacheManagerImpl; import io.quarkus.runtime.annotations.Recorder; @Recorder @@ -17,24 +19,29 @@ public class CaffeineCacheBuildRecorder { private static final Logger LOGGER = Logger.getLogger(CaffeineCacheBuildRecorder.class); - public void buildCaches(BeanContainer beanContainer, - Set cacheInfos) { - // The number of caches is known at build time so we can use fixed initialCapacity and loadFactor for the caches map. 
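The new `get(...)` implementation above relies on a "first caller computes, everyone else reuses the same future" pattern. Below is a simplified, standalone sketch of that pattern with a plain `ConcurrentHashMap` standing in for Caffeine's `asMap()` view; null-value conversion and the `CaffeineComputationThrowable` wrapping are deliberately omitted, so this illustrates the idea rather than the actual implementation.

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

// Standalone illustration of the compute-once pattern used by CaffeineCache.get(...).
public class ComputeOnceSketch {

    private final ConcurrentMap<Object, CompletableFuture<Object>> entries = new ConcurrentHashMap<>();

    public CompletableFuture<Object> get(Object key, Function<Object, Object> valueLoader) {
        CompletableFuture<Object> newValue = new CompletableFuture<>();
        CompletableFuture<Object> existingValue = entries.putIfAbsent(key, newValue);
        if (existingValue == null) {
            // This caller won the race: compute synchronously and complete the future.
            try {
                newValue.complete(valueLoader.apply(key));
            } catch (Throwable t) {
                // Remove the failed entry so a later call can retry, then surface the error.
                entries.remove(key, newValue);
                newValue.completeExceptionally(t);
            }
            return newValue;
        }
        // Another caller already created (and possibly completed) the future for this key.
        return existingValue;
    }
}
```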
- Map caches = new HashMap<>(cacheInfos.size() + 1, 1.0F); - - ManagedExecutor managedExecutor = Arc.container().instance(ManagedExecutor.class).orElse(null); - - for (CaffeineCacheInfo cacheInfo : cacheInfos) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debugf( - "Building Caffeine cache [%s] with [initialCapacity=%s], [maximumSize=%s], [expireAfterWrite=%s] and [expireAfterAccess=%s]", - cacheInfo.name, cacheInfo.initialCapacity, cacheInfo.maximumSize, cacheInfo.expireAfterWrite, - cacheInfo.expireAfterAccess); + public Supplier getCacheManagerSupplier(Set cacheInfos) { + Objects.requireNonNull(cacheInfos); + return new Supplier() { + @Override + public CacheManager get() { + if (cacheInfos.isEmpty()) { + return new CacheManagerImpl(Collections.emptyMap()); + } else { + // The number of caches is known at build time so we can use fixed initialCapacity and loadFactor for the caches map. + Map caches = new HashMap<>(cacheInfos.size() + 1, 1.0F); + for (CaffeineCacheInfo cacheInfo : cacheInfos) { + if (LOGGER.isDebugEnabled()) { + LOGGER.debugf( + "Building Caffeine cache [%s] with [initialCapacity=%s], [maximumSize=%s], [expireAfterWrite=%s] and [expireAfterAccess=%s]", + cacheInfo.name, cacheInfo.initialCapacity, cacheInfo.maximumSize, + cacheInfo.expireAfterWrite, cacheInfo.expireAfterAccess); + } + CaffeineCache cache = new CaffeineCache(cacheInfo); + caches.put(cacheInfo.name, cache); + } + return new CacheManagerImpl(caches); + } } - CaffeineCache cache = new CaffeineCache(cacheInfo, managedExecutor); - caches.put(cacheInfo.name, cache); - } - - beanContainer.instance(CacheRepository.class).setCaches(caches); + }; } } diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineComputationThrowable.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineComputationThrowable.java new file mode 100644 index 0000000000000..761431d9b6d70 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/caffeine/CaffeineComputationThrowable.java @@ -0,0 +1,17 @@ +package io.quarkus.cache.runtime.caffeine; + +/** + * This class is used to prevent Caffeine from logging unwanted warnings. 
+ */ +public class CaffeineComputationThrowable { + + private Throwable cause; + + public CaffeineComputationThrowable(Throwable cause) { + this.cause = cause; + } + + public Throwable getCause() { + return cause; + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/devconsole/CacheDevConsoleRecorder.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/devconsole/CacheDevConsoleRecorder.java new file mode 100644 index 0000000000000..c8d1058f50983 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/devconsole/CacheDevConsoleRecorder.java @@ -0,0 +1,34 @@ +package io.quarkus.cache.runtime.devconsole; + +import java.util.Optional; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.runtime.CaffeineCacheSupplier; +import io.quarkus.cache.runtime.caffeine.CaffeineCache; +import io.quarkus.devconsole.runtime.spi.DevConsolePostHandler; +import io.quarkus.devconsole.runtime.spi.FlashScopeUtil.FlashMessageStatus; +import io.quarkus.runtime.annotations.Recorder; +import io.vertx.core.Handler; +import io.vertx.core.MultiMap; +import io.vertx.ext.web.RoutingContext; + +@Recorder +public class CacheDevConsoleRecorder { + + public Handler clearCacheHandler() { + return new DevConsolePostHandler() { + @Override + protected void handlePost(RoutingContext event, MultiMap form) throws Exception { + String cacheName = form.get("name"); + Optional cache = CaffeineCacheSupplier.cacheManager().getCache(cacheName); + if (cache.isPresent() && cache.get() instanceof CaffeineCache) { + ((CaffeineCache) cache.get()).invalidateAll(); + // redirect to the same page so we can make sure we see the updated results + flashMessage(event, "Cache for " + cacheName + " cleared"); + return; + } + flashMessage(event, "Cache for " + cacheName + " not found", FlashMessageStatus.ERROR); + } + }; + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/noop/NoOpCache.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/noop/NoOpCache.java new file mode 100644 index 0000000000000..2bd7ebdb0fb07 --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/noop/NoOpCache.java @@ -0,0 +1,40 @@ +package io.quarkus.cache.runtime.noop; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; + +import io.quarkus.cache.runtime.AbstractCache; + +/** + * This class is an internal Quarkus cache implementation. Do not use it explicitly from your Quarkus application. The public + * methods signatures may change without prior notice. 
+ */ +public class NoOpCache extends AbstractCache { + + private static final String NAME = NoOpCache.class.getName(); + + @Override + public String getName() { + return NAME; + } + + @Override + public CompletableFuture get(Object key, Function valueLoader) { + CompletableFuture cacheValue = new CompletableFuture(); + try { + Object value = valueLoader.apply(key); + cacheValue.complete(value); + } catch (Throwable t) { + cacheValue.completeExceptionally(t); + } + return cacheValue; + } + + @Override + public void invalidate(Object key) { + } + + @Override + public void invalidateAll() { + } +} diff --git a/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/noop/NoOpCacheBuildRecorder.java b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/noop/NoOpCacheBuildRecorder.java new file mode 100644 index 0000000000000..26d6aadcddc4d --- /dev/null +++ b/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/noop/NoOpCacheBuildRecorder.java @@ -0,0 +1,37 @@ +package io.quarkus.cache.runtime.noop; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +import io.quarkus.cache.Cache; +import io.quarkus.cache.CacheManager; +import io.quarkus.cache.runtime.CacheManagerImpl; +import io.quarkus.runtime.annotations.Recorder; + +@Recorder +public class NoOpCacheBuildRecorder { + + public Supplier getCacheManagerSupplier(Set cacheNames) { + Objects.requireNonNull(cacheNames); + return new Supplier() { + @Override + public CacheManager get() { + if (cacheNames.isEmpty()) { + return new CacheManagerImpl(Collections.emptyMap()); + } else { + // The number of caches is known at build time so we can use fixed initialCapacity and loadFactor for the caches map. + Map caches = new HashMap<>(cacheNames.size() + 1, 1.0F); + NoOpCache cache = new NoOpCache(); + for (String cacheName : cacheNames) { + caches.put(cacheName, cache); + } + return new CacheManagerImpl(caches); + } + } + }; + } +} diff --git a/extensions/caffeine/deployment/src/main/java/io/quarkus/caffeine/deployment/CaffeineProcessor.java b/extensions/caffeine/deployment/src/main/java/io/quarkus/caffeine/deployment/CaffeineProcessor.java index eaabad23faae9..c3b4ad9286697 100644 --- a/extensions/caffeine/deployment/src/main/java/io/quarkus/caffeine/deployment/CaffeineProcessor.java +++ b/extensions/caffeine/deployment/src/main/java/io/quarkus/caffeine/deployment/CaffeineProcessor.java @@ -2,14 +2,23 @@ import java.io.IOException; +import org.jboss.jandex.ClassInfo; +import org.jboss.jandex.DotName; + +import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; public class CaffeineProcessor { + private static final String CACHE_LOADER_CLASS_NAME = "com.github.benmanes.caffeine.cache.CacheLoader"; + private static final DotName CACHE_LOADER_NAME = DotName.createSimple(CACHE_LOADER_CLASS_NAME); @BuildStep ReflectiveClassBuildItem cacheClasses() throws IOException { - //todo: how to we want to handle this? 
There are a lot of different cache classes + // for now, we register classes that have been required by our users + // registering all the implementations for reflection gives us a 14 MB penalty in native executable size + // see https://github.com/quarkusio/quarkus/issues/12961 return new ReflectiveClassBuildItem(false, false, "com.github.benmanes.caffeine.cache.SSLMS", "com.github.benmanes.caffeine.cache.SILMS", @@ -17,6 +26,19 @@ ReflectiveClassBuildItem cacheClasses() throws IOException { "com.github.benmanes.caffeine.cache.PDMS", "com.github.benmanes.caffeine.cache.SSMS", "com.github.benmanes.caffeine.cache.SSLA", - "com.github.benmanes.caffeine.cache.PSA"); + "com.github.benmanes.caffeine.cache.PSA", + "com.github.benmanes.caffeine.cache.SSMSW", + "com.github.benmanes.caffeine.cache.PSWMW", + "com.github.benmanes.caffeine.cache.SSW", + "com.github.benmanes.caffeine.cache.PSW"); + } + + @BuildStep + void cacheLoaders(CombinedIndexBuildItem combinedIndex, BuildProducer reflectiveClasses) { + reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, CACHE_LOADER_CLASS_NAME)); + + for (ClassInfo info : combinedIndex.getIndex().getAllKnownImplementors(CACHE_LOADER_NAME)) { + reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, info.name().toString())); + } } } diff --git a/extensions/caffeine/pom.xml b/extensions/caffeine/pom.xml index dd80bc2cb8f20..8ab1e106ca586 100644 --- a/extensions/caffeine/pom.xml +++ b/extensions/caffeine/pom.xml @@ -4,10 +4,10 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/caffeine/runtime/pom.xml b/extensions/caffeine/runtime/pom.xml index d7a9ad92799da..7504c562c5f26 100644 --- a/extensions/caffeine/runtime/pom.xml +++ b/extensions/caffeine/runtime/pom.xml @@ -13,6 +13,10 @@ Quarkus - Caffeine - Runtime A high performance caching library for Java 8+ + + io.quarkus + quarkus-core + com.github.ben-manes.caffeine caffeine @@ -20,6 +24,7 @@ org.graalvm.nativeimage svm + provided diff --git a/extensions/config-yaml/deployment/pom.xml b/extensions/config-yaml/deployment/pom.xml index 377e1eed0f290..fcbb526f40966 100644 --- a/extensions/config-yaml/deployment/pom.xml +++ b/extensions/config-yaml/deployment/pom.xml @@ -8,7 +8,6 @@ io.quarkus quarkus-config-yaml-parent 999-SNAPSHOT - ../ quarkus-config-yaml-deployment diff --git a/extensions/config-yaml/deployment/src/main/java/io/quarkus/config/yaml/deployment/ConfigYamlProcessor.java b/extensions/config-yaml/deployment/src/main/java/io/quarkus/config/yaml/deployment/ConfigYamlProcessor.java index 405e7e244da13..7119701d73c49 100644 --- a/extensions/config-yaml/deployment/src/main/java/io/quarkus/config/yaml/deployment/ConfigYamlProcessor.java +++ b/extensions/config-yaml/deployment/src/main/java/io/quarkus/config/yaml/deployment/ConfigYamlProcessor.java @@ -1,10 +1,12 @@ package io.quarkus.config.yaml.deployment; -import io.quarkus.config.yaml.runtime.ApplicationYamlProvider; +import io.quarkus.config.yaml.runtime.ApplicationYamlConfigSourceLoader; +import io.quarkus.deployment.Capability; import io.quarkus.deployment.Feature; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.AdditionalBootstrapConfigSourceProviderBuildItem; +import io.quarkus.deployment.builditem.CapabilityBuildItem; import 
io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.HotDeploymentWatchedFileBuildItem; @@ -16,13 +18,22 @@ public FeatureBuildItem feature() { } @BuildStep - public AdditionalBootstrapConfigSourceProviderBuildItem bootstrap() { - return new AdditionalBootstrapConfigSourceProviderBuildItem(ApplicationYamlProvider.class.getName()); + CapabilityBuildItem capability() { + return new CapabilityBuildItem(Capability.CONFIG_YAML); + } + + @BuildStep + public void bootstrap( + BuildProducer additionalBootstrapConfigSourceProvider) { + additionalBootstrapConfigSourceProvider.produce(new AdditionalBootstrapConfigSourceProviderBuildItem( + ApplicationYamlConfigSourceLoader.InFileSystem.class.getName())); + additionalBootstrapConfigSourceProvider.produce(new AdditionalBootstrapConfigSourceProviderBuildItem( + ApplicationYamlConfigSourceLoader.InClassPath.class.getName())); } @BuildStep void watchYamlConfig(BuildProducer items) { - items.produce(new HotDeploymentWatchedFileBuildItem(ApplicationYamlProvider.APPLICATION_YAML)); - items.produce(new HotDeploymentWatchedFileBuildItem(ApplicationYamlProvider.APPLICATION_YML)); + items.produce(new HotDeploymentWatchedFileBuildItem("application.yaml")); + items.produce(new HotDeploymentWatchedFileBuildItem("application.yml")); } } diff --git a/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ApplicationYamlHotDeploymentTest.java b/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ApplicationYamlHotDeploymentTest.java index e747da196da75..55785f32f7670 100644 --- a/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ApplicationYamlHotDeploymentTest.java +++ b/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ApplicationYamlHotDeploymentTest.java @@ -1,7 +1,5 @@ package io.quarkus.config.yaml.deployment; -import static io.quarkus.config.yaml.runtime.ApplicationYamlProvider.APPLICATION_YAML; -import static io.quarkus.config.yaml.runtime.ApplicationYamlProvider.APPLICATION_YML; import static org.hamcrest.Matchers.is; import org.jboss.shrinkwrap.api.ShrinkWrap; @@ -17,8 +15,8 @@ public class ApplicationYamlHotDeploymentTest { @RegisterExtension static final QuarkusDevModeTest test = new QuarkusDevModeTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) - .addAsResource(APPLICATION_YAML) - .addAsResource(APPLICATION_YML) + .addAsResource("application.yaml") + .addAsResource("application.yml") .addClass(FooResource.class)); @Test @@ -31,13 +29,13 @@ public void testConfigReload() { .statusCode(200) .body(is("CCCC")); - test.modifyResourceFile(APPLICATION_YAML, s -> s.replace("AAAA", "BBBB")); + test.modifyResourceFile("application.yaml", s -> s.replace("AAAA", "BBBB")); RestAssured.when().get("/foo").then() .statusCode(200) .body(is("BBBB")); - test.modifyResourceFile(APPLICATION_YML, s -> s.replace("CCCC", "DDDD")); + test.modifyResourceFile("application.yml", s -> s.replace("CCCC", "DDDD")); RestAssured.when().get("/foo2").then() .statusCode(200) diff --git a/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ClassConfigPropertiesTest.java b/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ClassConfigPropertiesTest.java new file mode 100644 index 0000000000000..6ce6457315bc6 --- /dev/null +++ b/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/ClassConfigPropertiesTest.java @@ -0,0 +1,150 @@ 
+package io.quarkus.config.yaml.deployment; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; + +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.config.ConfigProperties; +import io.quarkus.test.QuarkusUnitTest; + +public class ClassConfigPropertiesTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(DummyBean.class, SqlConfiguration.class, Nested.class) + .addAsResource("configprops.yaml", "application.yaml")); + + @Inject + DummyBean dummyBean; + + @Test + public void testConfiguredValues() { + SqlConfiguration sqlConfiguration = dummyBean.sqlConfiguration; + assertEquals("defaultName", sqlConfiguration.name); + assertEquals("defaultUser", sqlConfiguration.user); + assertEquals("defaultPassword", sqlConfiguration.getPassword()); + assertEquals(100, sqlConfiguration.maxPoolSize); + assertEquals(200, sqlConfiguration.getMaxIdleTimeSeconds()); + assertEquals(Type.DEFAULT_TYPE, sqlConfiguration.type); + + assertEquals(2, sqlConfiguration.inputs.size()); + Nested firstInput = sqlConfiguration.inputs.get(0); + assertEquals("my_connection_1", firstInput.name); + assertEquals("systemuser", firstInput.user); + assertEquals("secret", firstInput.password); + assertEquals(Type.MSSQL, firstInput.getType()); + assertEquals(100, firstInput.getMaxPoolSize()); + assertEquals(150, firstInput.maxIdleTimeSeconds); + Nested secondInput = sqlConfiguration.inputs.get(1); + assertEquals("my_connection_2", secondInput.name); + assertEquals("otheruser", secondInput.user); + assertEquals("secret", secondInput.password); + assertEquals(Type.POSTGRES, secondInput.getType()); + assertEquals(10, secondInput.getMaxPoolSize()); + assertEquals(20, secondInput.maxIdleTimeSeconds); + + assertEquals(2, sqlConfiguration.getOutputs().size()); + Nested firstOutput = sqlConfiguration.getOutputs().get(0); + assertEquals("out_connection_1", firstOutput.name); + assertEquals("someuser", firstOutput.user); + assertEquals("asecret", firstOutput.password); + assertEquals(Type.MSSQL, firstOutput.getType()); + assertEquals(100, firstOutput.getMaxPoolSize()); + assertEquals(200, firstOutput.maxIdleTimeSeconds); + Nested secondOutput = sqlConfiguration.getOutputs().get(1); + assertEquals("out_connection_2", secondOutput.name); + assertEquals("someuser", secondOutput.user); + assertEquals("asecret", secondOutput.password); + assertEquals(Type.POSTGRES, secondOutput.getType()); + assertEquals(200, secondOutput.getMaxPoolSize()); + assertEquals(300, secondOutput.maxIdleTimeSeconds); + } + + @Singleton + public static class DummyBean { + @Inject + SqlConfiguration sqlConfiguration; + } + + @ConfigProperties(prefix = "sql") + public static class SqlConfiguration { + @ConfigProperty(name = "max_pool_size") + public int maxPoolSize = 50; + @ConfigProperty(name = "max_idle_time_seconds") + private int maxIdleTimeSeconds = 100; + public String name; + public String user; + private String password; + public Type type; + public List inputs; + private List outputs; + + public int getMaxIdleTimeSeconds() { + return maxIdleTimeSeconds; + } + + public void setMaxIdleTimeSeconds(int maxIdleTimeSeconds) { + 
this.maxIdleTimeSeconds = maxIdleTimeSeconds; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public List getOutputs() { + return outputs; + } + + public void setOutputs(List outputs) { + this.outputs = outputs; + } + } + + public static class Nested { + @ConfigProperty(name = "max_pool_size") + private int maxPoolSize = 50; + @ConfigProperty(name = "max_idle_time_seconds") + public int maxIdleTimeSeconds = 150; + public String name; + public String user; + public String password; + private Type type; + + public int getMaxPoolSize() { + return maxPoolSize; + } + + public void setMaxPoolSize(int maxPoolSize) { + this.maxPoolSize = maxPoolSize; + } + + public Type getType() { + return type; + } + + public void setType(Type type) { + this.type = type; + } + } + + public enum Type { + MSSQL, + POSTGRES, + MYSQL, + DEFAULT_TYPE + } +} diff --git a/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/InterfaceConfigPropertiesTest.java b/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/InterfaceConfigPropertiesTest.java new file mode 100644 index 0000000000000..bb61a594aa0a4 --- /dev/null +++ b/extensions/config-yaml/deployment/src/test/java/io/quarkus/config/yaml/deployment/InterfaceConfigPropertiesTest.java @@ -0,0 +1,135 @@ +package io.quarkus.config.yaml.deployment; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; + +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.config.ConfigProperties; +import io.quarkus.test.QuarkusUnitTest; + +public class InterfaceConfigPropertiesTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) + .addClasses(DummyBean.class, SqlConfiguration.class, SqlConfiguration.Nested.class, + SqlConfiguration.Type.class) + .addAsResource("configprops.yaml", "application.yaml")); + + @Inject + DummyBean dummyBean; + + @Test + public void testConfiguredValues() { + SqlConfiguration sqlConfiguration = dummyBean.sqlConfiguration; + assertEquals("defaultName", sqlConfiguration.getName()); + assertEquals("defaultUser", sqlConfiguration.getUser()); + assertEquals("defaultPassword", sqlConfiguration.getPassword()); + assertEquals(100, sqlConfiguration.maxPoolSize()); + assertEquals(200, sqlConfiguration.maxIdleTimeSeconds()); + assertEquals(SqlConfiguration.Type.DEFAULT_TYPE, sqlConfiguration.getType()); + + assertEquals(2, sqlConfiguration.getInputs().size()); + SqlConfiguration.Nested firstInput = sqlConfiguration.getInputs().get(0); + assertEquals("my_connection_1", firstInput.name); + assertEquals("systemuser", firstInput.user); + assertEquals("secret", firstInput.password); + assertEquals(SqlConfiguration.Type.MSSQL, firstInput.getType()); + assertEquals(100, firstInput.getMaxPoolSize()); + assertEquals(150, firstInput.maxIdleTimeSeconds); + SqlConfiguration.Nested secondInput = sqlConfiguration.getInputs().get(1); + assertEquals("my_connection_2", secondInput.name); + assertEquals("otheruser", secondInput.user); + assertEquals("secret", secondInput.password); + 
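For orientation while reading the assertions in these two tests: both the class-based and the interface-based `@ConfigProperties` tests bind against the same `configprops.yaml` fixture, which is added further down in this diff. An abridged view of that tree, limited to the keys needed to follow the first `inputs` entry, looks like this:

```yaml
# Abridged from the configprops.yaml fixture added later in this patch;
# both tests bind this tree to the "sql" prefix.
sql:
  max_pool_size: 100
  name: defaultName
  inputs:
    - name: my_connection_1
      type: MSSQL
      max_pool_size: 100
      user: systemuser
      password: secret
```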
assertEquals(SqlConfiguration.Type.POSTGRES, secondInput.getType()); + assertEquals(10, secondInput.getMaxPoolSize()); + assertEquals(20, secondInput.maxIdleTimeSeconds); + + assertEquals(2, sqlConfiguration.getOutputs().size()); + SqlConfiguration.Nested firstOutput = sqlConfiguration.getOutputs().get(0); + assertEquals("out_connection_1", firstOutput.name); + assertEquals("someuser", firstOutput.user); + assertEquals("asecret", firstOutput.password); + assertEquals(SqlConfiguration.Type.MSSQL, firstOutput.getType()); + assertEquals(100, firstOutput.getMaxPoolSize()); + assertEquals(200, firstOutput.maxIdleTimeSeconds); + SqlConfiguration.Nested secondOutput = sqlConfiguration.getOutputs().get(1); + assertEquals("out_connection_2", secondOutput.name); + assertEquals("someuser", secondOutput.user); + assertEquals("asecret", secondOutput.password); + assertEquals(SqlConfiguration.Type.POSTGRES, secondOutput.getType()); + assertEquals(200, secondOutput.getMaxPoolSize()); + assertEquals(300, secondOutput.maxIdleTimeSeconds); + } + + @Singleton + public static class DummyBean { + @Inject + SqlConfiguration sqlConfiguration; + } + + @ConfigProperties(prefix = "sql") + public interface SqlConfiguration { + @ConfigProperty(name = "max_pool_size", defaultValue = "50") + int maxPoolSize(); + + @ConfigProperty(name = "max_idle_time_seconds", defaultValue = "100") + int maxIdleTimeSeconds(); + + String getName(); + + String getUser(); + + String getPassword(); + + Type getType(); + + List getInputs(); + + List getOutputs(); + + class Nested { + @ConfigProperty(name = "max_pool_size") + private int maxPoolSize = 50; + @ConfigProperty(name = "max_idle_time_seconds") + public int maxIdleTimeSeconds = 150; + public String name; + public String user; + public String password; + private Type type; + + public int getMaxPoolSize() { + return maxPoolSize; + } + + public void setMaxPoolSize(int maxPoolSize) { + this.maxPoolSize = maxPoolSize; + } + + public Type getType() { + return type; + } + + public void setType(Type type) { + this.type = type; + } + } + + enum Type { + MSSQL, + POSTGRES, + MYSQL, + DEFAULT_TYPE + } + } + +} diff --git a/extensions/config-yaml/deployment/src/test/resources/configprops.yaml b/extensions/config-yaml/deployment/src/test/resources/configprops.yaml new file mode 100644 index 0000000000000..93f4e5984a139 --- /dev/null +++ b/extensions/config-yaml/deployment/src/test/resources/configprops.yaml @@ -0,0 +1,37 @@ +sql: + max_pool_size: 100 + max_idle_time_seconds: 200 + name: defaultName + user: defaultUser + password: defaultPassword + type: DEFAULT_TYPE + inputs: + - name: my_connection_1 + type: MSSQL + min_pool_size: 1 + max_pool_size: 100 + user: systemuser + password: secret + - name: my_connection_2 + type: POSTGRES + max_pool_size: 10 + max_idle_time_seconds: 20 + user: otheruser + password: secret + outputs: + - name: out_connection_1 + type: MSSQL + max_pool_size: 100 + min_pool_size: 1 + create_increment: 5 + max_idle_time_seconds: 200 + user: someuser + password: asecret + - name: out_connection_2 + type: POSTGRES + max_pool_size: 200 + min_pool_size: 1 + create_increment: 5 + max_idle_time_seconds: 300 + user: someuser + password: asecret diff --git a/extensions/config-yaml/pom.xml b/extensions/config-yaml/pom.xml index a51802c75422f..799d1334b02c0 100644 --- a/extensions/config-yaml/pom.xml +++ b/extensions/config-yaml/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/AbstractYamlObjectConverter.java b/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/AbstractYamlObjectConverter.java new file mode 100644 index 0000000000000..216aeb4617c03 --- /dev/null +++ b/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/AbstractYamlObjectConverter.java @@ -0,0 +1,53 @@ +package io.quarkus.config.yaml.runtime; + +import java.util.Collections; +import java.util.Map; + +import org.eclipse.microprofile.config.spi.Converter; +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.Constructor; +import org.yaml.snakeyaml.introspector.Property; +import org.yaml.snakeyaml.introspector.PropertyUtils; + +/** + * This class is used by generated code to support the list of objects use case in {@code @ConfigProperties} + */ +@SuppressWarnings("unused") +public abstract class AbstractYamlObjectConverter implements Converter { + + protected abstract Class getClazz(); + + /** + * Contains names of fields that need to be converted from the value that MP-Config has set + * to the actual name of the field in the class + */ + protected Map getFieldNameMap() { + return Collections.emptyMap(); + } + + @Override + public T convert(String value) { + org.yaml.snakeyaml.constructor.Constructor constructor = new Constructor(); + CustomPropertyUtils propertyUtils = new CustomPropertyUtils(getFieldNameMap()); + propertyUtils.setSkipMissingProperties(true); + constructor.setPropertyUtils(propertyUtils); + return new Yaml(constructor).loadAs(value, getClazz()); + } + + private static class CustomPropertyUtils extends PropertyUtils { + + private final Map fieldNameMap; + + public CustomPropertyUtils(Map fieldNameMap) { + this.fieldNameMap = fieldNameMap; + } + + @Override + public Property getProperty(Class type, String name) { + if (fieldNameMap.containsKey(name)) { + name = fieldNameMap.get(name); + } + return super.getProperty(type, name); + } + } +} diff --git a/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/ApplicationYamlConfigSourceLoader.java b/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/ApplicationYamlConfigSourceLoader.java new file mode 100644 index 0000000000000..cfb664721c4b6 --- /dev/null +++ b/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/ApplicationYamlConfigSourceLoader.java @@ -0,0 +1,69 @@ +package io.quarkus.config.yaml.runtime; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.microprofile.config.spi.ConfigSource; +import org.eclipse.microprofile.config.spi.ConfigSourceProvider; + +import io.smallrye.config.AbstractLocationConfigSourceLoader; +import io.smallrye.config.source.yaml.YamlConfigSource; + +public class ApplicationYamlConfigSourceLoader extends AbstractLocationConfigSourceLoader { + @Override + protected String[] getFileExtensions() { + return new String[] { + "yaml", + "yml" + }; + + } + + @Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return new YamlConfigSource(url, ordinal); + } + + public static class InClassPath extends ApplicationYamlConfigSourceLoader implements ConfigSourceProvider { + 
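Stepping back to `AbstractYamlObjectConverter` shown just above: its Javadoc notes that concrete subclasses are produced by generated code to cover the list-of-objects case in `@ConfigProperties`. Purely for illustration (and with the generic signatures, which this diff rendering has stripped, reinstated as an assumption), a hand-written stand-in for such a generated converter targeting the `Nested` type used in the tests could look roughly like this:

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical, hand-written equivalent of what the build step generates;
// the real subclasses are emitted at build time, not written by users.
public class NestedYamlConverter extends AbstractYamlObjectConverter<Nested> {

    @Override
    protected Class<Nested> getClazz() {
        return Nested.class;
    }

    @Override
    protected Map<String, String> getFieldNameMap() {
        // Map the snake_case names MP Config sees onto the actual Java field names,
        // so SnakeYAML can populate the object.
        Map<String, String> map = new HashMap<>();
        map.put("max_pool_size", "maxPoolSize");
        map.put("max_idle_time_seconds", "maxIdleTimeSeconds");
        return map;
    }
}
```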
@Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return super.loadConfigSource(url, 255); + } + + @Override + public List getConfigSources(final ClassLoader classLoader) { + List configSources = new ArrayList<>(); + configSources.addAll(loadConfigSources("application.yaml", classLoader)); + configSources.addAll(loadConfigSources("application.yml", classLoader)); + return configSources; + } + + @Override + protected List tryFileSystem(final URI uri) { + return new ArrayList<>(); + } + } + + public static class InFileSystem extends ApplicationYamlConfigSourceLoader implements ConfigSourceProvider { + @Override + protected ConfigSource loadConfigSource(final URL url, final int ordinal) throws IOException { + return super.loadConfigSource(url, 265); + } + + @Override + public List getConfigSources(final ClassLoader classLoader) { + List configSources = new ArrayList<>(); + configSources.addAll(loadConfigSources("config/application.yaml", classLoader)); + configSources.addAll(loadConfigSources("config/application.yml", classLoader)); + return configSources; + } + + @Override + protected List tryClassPath(final URI uri, final ClassLoader classLoader) { + return new ArrayList<>(); + } + } +} diff --git a/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/ApplicationYamlProvider.java b/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/ApplicationYamlProvider.java deleted file mode 100644 index df3c86d8b62fb..0000000000000 --- a/extensions/config-yaml/runtime/src/main/java/io/quarkus/config/yaml/runtime/ApplicationYamlProvider.java +++ /dev/null @@ -1,87 +0,0 @@ -package io.quarkus.config.yaml.runtime; - -import java.io.Closeable; -import java.io.FileNotFoundException; -import java.io.IOError; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.eclipse.microprofile.config.spi.ConfigSource; -import org.eclipse.microprofile.config.spi.ConfigSourceProvider; - -import io.smallrye.config.source.yaml.YamlConfigSource; - -/** - * - */ -public final class ApplicationYamlProvider implements ConfigSourceProvider { - - public static final String APPLICATION_YAML = "application.yaml"; - private static final int APPLICATION_YAML_IN_JAR_ORDINAL = 254; - public static final String APPLICATION_YML = "application.yml"; - private static final int APPLICATION_YML_IN_JAR_ORDINAL = 253; - - @Override - public Iterable getConfigSources(final ClassLoader forClassLoader) { - List yamlSources = getConfigSourcesForFileName(APPLICATION_YAML, APPLICATION_YAML_IN_JAR_ORDINAL, - forClassLoader); - List ymlSources = getConfigSourcesForFileName(APPLICATION_YML, APPLICATION_YML_IN_JAR_ORDINAL, - forClassLoader); - if (yamlSources.isEmpty() && ymlSources.isEmpty()) { - return Collections.emptyList(); - } else if (yamlSources.isEmpty()) { - return ymlSources; - } else if (ymlSources.isEmpty()) { - return yamlSources; - } - List result = new ArrayList<>(yamlSources.size() + ymlSources.size()); - result.addAll(yamlSources); - result.addAll(ymlSources); - return result; - } - - private List getConfigSourcesForFileName(String fileName, int inJarOrdinal, ClassLoader forClassLoader) { - List sources = Collections.emptyList(); - // mirror the in-JAR application.properties - 
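The two nested loaders replace the old provider's hand-rolled ordinals: `InClassPath` serves `application.yaml`/`application.yml` from the classpath at ordinal 255, while `InFileSystem` serves `config/application.yaml` (and `.yml`) at ordinal 265, so a file placed next to the runner overrides the packaged one. A minimal sketch of observing that precedence through the standard MicroProfile Config API (the `greeting.message` key is invented for the example):

```java
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;

public class YamlPrecedenceSketch {
    public static void main(String[] args) {
        // If both the in-jar application.yaml (ordinal 255) and a
        // config/application.yaml next to the runner (ordinal 265) define
        // greeting.message, the file-system value wins: higher ordinal, higher priority.
        Config config = ConfigProvider.getConfig();
        System.out.println(config.getValue("greeting.message", String.class));
    }
}
```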
try { - InputStream str = forClassLoader.getResourceAsStream(fileName); - if (str != null) { - try (Closeable c = str) { - YamlConfigSource configSource = new YamlConfigSource(fileName, str, inJarOrdinal); - assert sources.isEmpty(); - sources = Collections.singletonList(configSource); - } - } - } catch (IOException e) { - // configuration problem should be thrown - throw new IOError(e); - } - // mirror the on-filesystem application.properties - final Path path = Paths.get("config", fileName); - if (Files.exists(path)) { - try (InputStream str = Files.newInputStream(path)) { - YamlConfigSource configSource = new YamlConfigSource(fileName, str, inJarOrdinal + 10); - if (sources.isEmpty()) { - sources = Collections.singletonList(configSource); - } else { - // todo: sources = List.of(sources.get(0), configSource); - sources = Arrays.asList(sources.get(0), configSource); - } - } catch (NoSuchFileException | FileNotFoundException e) { - // skip (race) - } catch (IOException e) { - // configuration problem should be thrown - throw new IOError(e); - } - } - return sources; - } -} diff --git a/extensions/config-yaml/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/config-yaml/runtime/src/main/resources/META-INF/quarkus-extension.yaml index be50cd6e20284..3335e7ff29e83 100644 --- a/extensions/config-yaml/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/config-yaml/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -1,4 +1,3 @@ ---- name: "YAML Configuration" metadata: keywords: @@ -9,3 +8,10 @@ metadata: - "core" status: "stable" guide: "https://quarkus.io/guides/config#yaml" + codestart: + name: "config-yaml" + kind: "example" + languages: + - "java" + - "kotlin" + artifact: "io.quarkus:quarkus-descriptor-json" diff --git a/extensions/config-yaml/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.ConfigSourceProvider b/extensions/config-yaml/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.ConfigSourceProvider index e85b2e9dda1c0..67fab24d3e354 100644 --- a/extensions/config-yaml/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.ConfigSourceProvider +++ b/extensions/config-yaml/runtime/src/main/resources/META-INF/services/org.eclipse.microprofile.config.spi.ConfigSourceProvider @@ -1 +1,2 @@ -io.quarkus.config.yaml.runtime.ApplicationYamlProvider +io.quarkus.config.yaml.runtime.ApplicationYamlConfigSourceLoader$InFileSystem +io.quarkus.config.yaml.runtime.ApplicationYamlConfigSourceLoader$InClassPath diff --git a/extensions/consul-config/deployment/pom.xml b/extensions/consul-config/deployment/pom.xml index fb678f3990a74..411e420da0eaf 100644 --- a/extensions/consul-config/deployment/pom.xml +++ b/extensions/consul-config/deployment/pom.xml @@ -17,6 +17,14 @@ io.quarkus quarkus-jackson-deployment + + io.quarkus + quarkus-mutiny-deployment + + + io.quarkus + quarkus-vertx-core-deployment + io.quarkus diff --git a/extensions/consul-config/pom.xml b/extensions/consul-config/pom.xml index 3d7259e52405d..88d7557158f4d 100644 --- a/extensions/consul-config/pom.xml +++ b/extensions/consul-config/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - quarkus-build-parent + quarkus-extensions-parent io.quarkus 999-SNAPSHOT - ../../build-parent/pom.xml + ../pom.xml 4.0.0 diff --git a/extensions/consul-config/runtime/pom.xml 
b/extensions/consul-config/runtime/pom.xml index 03fe8274555e1..06ecea7a8fc14 100644 --- a/extensions/consul-config/runtime/pom.xml +++ b/extensions/consul-config/runtime/pom.xml @@ -23,14 +23,16 @@ quarkus-jackson - org.apache.httpcomponents - httpclient - - - commons-logging - commons-logging - - + io.smallrye.reactive + smallrye-mutiny-vertx-web-client + + + io.quarkus + quarkus-mutiny + + + io.quarkus + quarkus-vertx-core org.jboss.logging diff --git a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigGateway.java b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigGateway.java index 72317f85d4839..593eb79eb8ce4 100644 --- a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigGateway.java +++ b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigGateway.java @@ -1,12 +1,13 @@ package io.quarkus.consul.config.runtime; -import java.io.IOException; -import java.util.Optional; +import io.smallrye.mutiny.Uni; interface ConsulConfigGateway { /** * Retrieves a value from Consul's Key / Value store using the value of {@code key} */ - Optional getValue(String key) throws IOException; + Uni getValue(String key); + + void close(); } diff --git a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProvider.java b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProvider.java index 4c01b3d67d36d..8bd60a0d15660 100644 --- a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProvider.java +++ b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProvider.java @@ -1,17 +1,19 @@ package io.quarkus.consul.config.runtime; -import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Optional; +import java.util.concurrent.CompletionException; +import java.util.function.Consumer; import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.ConfigSourceProvider; import org.jboss.logging.Logger; +import io.smallrye.mutiny.Uni; +import io.smallrye.mutiny.groups.UniAwait; + class ConsulConfigSourceProvider implements ConfigSourceProvider { private static final Logger log = Logger.getLogger(ConsulConfigSourceProvider.class); @@ -22,7 +24,7 @@ class ConsulConfigSourceProvider implements ConfigSourceProvider { private final ResponseConfigSourceUtil responseConfigSourceUtil; public ConsulConfigSourceProvider(ConsulConfig config) { - this(config, new DefaultConsulConfigGateway(config), new ResponseConfigSourceUtil()); + this(config, new VertxConsulConfigGateway(config), new ResponseConfigSourceUtil()); } // visible for testing @@ -47,27 +49,40 @@ public Iterable getConfigSources(ClassLoader cl) { List result = new ArrayList<>(keys.size()); + List> allUnis = new ArrayList<>(); + for (Map.Entry entry : keys.entrySet()) { String fullKey = config.prefix.isPresent() ? 
config.prefix.get() + "/" + entry.getKey() : entry.getKey(); - log.debug("Attempting to look up value of key '" + fullKey + "' from Consul."); - - try { - Optional optionalResponse = consulConfigGateway.getValue(fullKey); - if (optionalResponse.isPresent()) { - result.add( - responseConfigSourceUtil.toConfigSource(optionalResponse.get(), entry.getValue(), config.prefix)); - } else { - String message = "Key '" + fullKey + "' not found in Consul."; - if (config.failOnMissingKey) { - throw new RuntimeException(message); + allUnis.add(consulConfigGateway.getValue(fullKey).invoke(new Consumer() { + @Override + public void accept(Response response) { + if (response != null) { + result.add( + responseConfigSourceUtil.toConfigSource(response, entry.getValue(), + config.prefix)); } else { - log.info(message); + String message = "Key '" + fullKey + "' not found in Consul."; + if (config.failOnMissingKey) { + throw new RuntimeException(message); + } else { + log.info(message); + } } } - log.debug("Done reading value of key '" + fullKey + "'"); - } catch (IOException e) { - throw new UncheckedIOException("An error occurred while attempting to fetch configuration from Consul.", e); + })); + } + + try { + UniAwait await = Uni.combine().all().unis(allUnis).discardItems().await(); + if (config.agent.connectionTimeout.isZero() && config.agent.readTimeout.isZero()) { + await.indefinitely(); + } else { + await.atMost(config.agent.connectionTimeout.plus(config.agent.readTimeout.multipliedBy(2))); } + } catch (CompletionException e) { + throw new RuntimeException("An error occurred while attempting to fetch configuration from Consul.", e); + } finally { + consulConfigGateway.close(); } return result; diff --git a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/DefaultConsulConfigGateway.java b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/DefaultConsulConfigGateway.java deleted file mode 100644 index b2482d48ad801..0000000000000 --- a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/DefaultConsulConfigGateway.java +++ /dev/null @@ -1,154 +0,0 @@ -package io.quarkus.consul.config.runtime; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyManagementException; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.UnrecoverableKeyException; -import java.security.cert.CertificateException; -import java.util.List; -import java.util.Optional; - -import org.apache.http.HttpEntity; -import org.apache.http.HttpStatus; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.conn.ssl.SSLConnectionSocketFactory; -import org.apache.http.conn.ssl.TrustAllStrategy; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.ssl.SSLContextBuilder; -import org.apache.http.ssl.SSLContexts; -import org.apache.http.util.EntityUtils; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; - -class DefaultConsulConfigGateway implements 
ConsulConfigGateway { - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - - private final ConsulConfig consulConfig; - private final SSLConnectionSocketFactory sslSocketFactory; - - public DefaultConsulConfigGateway(ConsulConfig consulConfig) { - this.consulConfig = consulConfig; - if (consulConfig.agent.trustStore.isPresent() || consulConfig.agent.keyStore.isPresent() - || consulConfig.agent.trustCerts) { - this.sslSocketFactory = createFactoryFromAgentConfig(consulConfig.agent); - } else { - this.sslSocketFactory = null; - } - - } - - private SSLConnectionSocketFactory createFactoryFromAgentConfig(ConsulConfig.AgentConfig agentConfig) { - try { - SSLContextBuilder sslContextBuilder = SSLContexts.custom(); - if (agentConfig.trustStore.isPresent()) { - sslContextBuilder = sslContextBuilder - .loadTrustMaterial(readStore(agentConfig.trustStore.get(), agentConfig.trustStorePassword), null); - } else if (agentConfig.trustCerts) { - sslContextBuilder = sslContextBuilder.loadTrustMaterial(TrustAllStrategy.INSTANCE); - } - if (agentConfig.keyStore.isPresent()) { - String keyPassword = agentConfig.keyPassword.orElse(agentConfig.keyStorePassword.orElse("")); - sslContextBuilder = sslContextBuilder.loadKeyMaterial( - readStore(agentConfig.keyStore.get(), agentConfig.keyStorePassword), keyPassword.toCharArray()); - } - return new SSLConnectionSocketFactory(sslContextBuilder.build(), NoopHostnameVerifier.INSTANCE); - } catch (NoSuchAlgorithmException | KeyManagementException | KeyStoreException | IOException | CertificateException - | UnrecoverableKeyException e) { - throw new RuntimeException(e); - } - } - - private static String findKeystoreFileType(Path keyStorePath) { - String pathName = keyStorePath.toString().toLowerCase(); - if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) { - return "PKS12"; - } - return "JKS"; - } - - private static KeyStore readStore(Path keyStorePath, Optional keyStorePassword) - throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException { - - String keyStoreType = findKeystoreFileType(keyStorePath); - - InputStream classPathResource = Thread.currentThread().getContextClassLoader() - .getResourceAsStream(keyStorePath.toString()); - if (classPathResource != null) { - try (InputStream is = classPathResource) { - return doReadStore(is, keyStoreType, keyStorePassword); - } - } else { - try (InputStream is = Files.newInputStream(keyStorePath)) { - return doReadStore(is, keyStoreType, keyStorePassword); - } - } - } - - private static KeyStore doReadStore(InputStream keyStoreStream, String keyStoreType, Optional keyStorePassword) - throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException { - KeyStore keyStore = KeyStore.getInstance(keyStoreType); - keyStore.load(keyStoreStream, keyStorePassword.isPresent() ? 
keyStorePassword.get().toCharArray() : null); - return keyStore; - } - - @Override - public Optional getValue(String key) throws IOException { - RequestConfig requestConfig = RequestConfig.custom() - .setConnectionRequestTimeout((int) consulConfig.agent.connectionTimeout.toMillis()) - .setSocketTimeout((int) consulConfig.agent.readTimeout.toMillis()) - .build(); - HttpClientBuilder httpClientBuilder = HttpClients.custom().setDefaultRequestConfig(requestConfig); - if (sslSocketFactory != null) { - httpClientBuilder.setSSLSocketFactory(sslSocketFactory); - } - try (CloseableHttpClient client = httpClientBuilder.build()) { - String finalUri = (consulConfig.agent.useHttps ? "https" : "http") + "://" - + consulConfig.agent.hostPort.getHostName() + ":" + consulConfig.agent.hostPort.getPort() - + "/v1/kv/" - + key; - HttpGet request = new HttpGet(finalUri); - request.addHeader("Accept", "application/json"); - if (consulConfig.agent.token.isPresent()) { - request.addHeader("Authorization", "Bearer " + consulConfig.agent.token.get()); - } - - try (CloseableHttpResponse response = client.execute(request)) { - int statusCode = response.getStatusLine().getStatusCode(); - if (statusCode == HttpStatus.SC_NOT_FOUND) { - return Optional.empty(); - } - if (statusCode != HttpStatus.SC_OK) { - throw new RuntimeException("Got unexpected HTTP response code " + statusCode - + " from " + finalUri); - } - HttpEntity entity = response.getEntity(); - if (entity == null) { - throw new RuntimeException("Got empty HTTP response body " + finalUri); - } - - List value = OBJECT_MAPPER.readValue(EntityUtils.toString(entity), - new TypeReference>() { - }); - if (value.size() != 1) { - throw new IllegalStateException( - "Consul returned an unexpected number of results when looking up value of key '" + key + "'"); - } - return Optional.of(value.get(0)); - } - } - } -} diff --git a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/Response.java b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/Response.java index c12df522f990b..c5e39bef2b4ce 100644 --- a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/Response.java +++ b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/Response.java @@ -3,9 +3,6 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - /** * Represent (part of) the JSON response from Consul * @@ -16,8 +13,7 @@ public class Response { private final String key; private final String value; - @JsonCreator - public Response(@JsonProperty("Key") String key, @JsonProperty("Value") String value) { + public Response(String key, String value) { this.key = key; this.value = value; } diff --git a/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/VertxConsulConfigGateway.java b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/VertxConsulConfigGateway.java new file mode 100644 index 0000000000000..1548e997d0e32 --- /dev/null +++ b/extensions/consul-config/runtime/src/main/java/io/quarkus/consul/config/runtime/VertxConsulConfigGateway.java @@ -0,0 +1,167 @@ +package io.quarkus.consul.config.runtime; + +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Optional; + +import org.jboss.logging.Logger; + +import io.smallrye.mutiny.Uni; 
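The gateway contract now yields a `Uni` per key (with a null item standing in for a missing key), and `ConsulConfigSourceProvider` joins all lookups with `Uni.combine().all()` before awaiting the combined result. A minimal, self-contained sketch of that pattern, using stub Unis in place of real Consul calls:

```java
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;

import io.smallrye.mutiny.Uni;

public class CombineLookupsSketch {
    public static void main(String[] args) {
        // Stand-ins for gateway.getValue(key); a missing key is modelled as a null item,
        // which is how the new gateway reports a 404 from Consul.
        List<Uni<?>> lookups = new ArrayList<>();
        lookups.add(Uni.createFrom().item("greeting.message=hello")
                .invoke(v -> System.out.println("got " + v)));
        lookups.add(Uni.createFrom().nullItem()
                .invoke(v -> System.out.println("missing key -> " + v)));

        // Subscribe to every lookup, wait for all of them to complete, bounded by a timeout
        // (the provider derives its bound from the agent connection/read timeouts).
        Uni.combine().all().unis(lookups).discardItems()
                .await().atMost(Duration.ofSeconds(10));
    }
}
```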
+import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.core.net.JksOptions; +import io.vertx.core.net.KeyStoreOptionsBase; +import io.vertx.core.net.PfxOptions; +import io.vertx.ext.web.client.WebClientOptions; +import io.vertx.mutiny.core.Vertx; +import io.vertx.mutiny.core.buffer.Buffer; +import io.vertx.mutiny.ext.web.client.HttpRequest; +import io.vertx.mutiny.ext.web.client.WebClient; + +public class VertxConsulConfigGateway implements ConsulConfigGateway { + + private static final Logger log = Logger.getLogger(VertxConsulConfigGateway.class); + + private static final String PKS_12 = "PKS12"; + private static final String JKS = "JKS"; + + private final ConsulConfig consulConfig; + private final Vertx vertx; + private final WebClient webClient; + + public VertxConsulConfigGateway(ConsulConfig consulConfig) { + this.consulConfig = consulConfig; + this.vertx = Vertx.vertx(); + this.webClient = createHttpClient(vertx, consulConfig.agent); + } + + public static WebClient createHttpClient(Vertx vertx, ConsulConfig.AgentConfig agentConfig) { + + WebClientOptions webClientOptions = new WebClientOptions() + .setConnectTimeout((int) agentConfig.connectionTimeout.toMillis()) + .setIdleTimeout((int) agentConfig.readTimeout.getSeconds()); + + boolean trustAll = agentConfig.trustCerts; + try { + if (agentConfig.trustStore.isPresent()) { + Path trustStorePath = agentConfig.trustStore.get(); + String type = determineStoreType(trustStorePath); + KeyStoreOptionsBase storeOptions = storeOptions(trustStorePath, agentConfig.trustStorePassword, + createStoreOptions(type)); + if (isPfx(type)) { + webClientOptions.setPfxTrustOptions((PfxOptions) storeOptions); + } else { + webClientOptions.setTrustStoreOptions((JksOptions) storeOptions); + } + } else if (trustAll) { + skipVerify(webClientOptions); + } else if (agentConfig.keyStore.isPresent()) { + Path trustStorePath = agentConfig.keyStore.get(); + String type = determineStoreType(trustStorePath); + KeyStoreOptionsBase storeOptions = storeOptions(trustStorePath, agentConfig.keyStorePassword, + createStoreOptions(type)); + if (isPfx(type)) { + webClientOptions.setPfxTrustOptions((PfxOptions) storeOptions); + } else { + webClientOptions.setTrustStoreOptions((JksOptions) storeOptions); + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } + + return WebClient.create(vertx, webClientOptions); + } + + private static void skipVerify(WebClientOptions options) { + options.setTrustAll(true); + options.setVerifyHost(false); + } + + private static KeyStoreOptionsBase createStoreOptions(String type) { + if (isPfx(type)) { + return new PfxOptions(); + } + return new JksOptions(); + } + + private static boolean isPfx(String type) { + return PKS_12.equals(type); + } + + private static KeyStoreOptionsBase storeOptions(Path storePath, + Optional storePassword, T store) throws Exception { + return store + .setPassword(storePassword.orElse("")) + .setValue(io.vertx.core.buffer.Buffer.buffer(storeBytes(storePath))); + } + + private static String determineStoreType(Path keyStorePath) { + String pathName = keyStorePath.toString().toLowerCase(); + if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) { + return PKS_12; + } + return JKS; + } + + private static byte[] storeBytes(Path keyStorePath) + throws Exception { + InputStream classPathResource = Thread.currentThread().getContextClassLoader() + .getResourceAsStream(keyStorePath.toString()); + if (classPathResource != null) { + try 
(InputStream is = classPathResource) { + return allBytes(is); + } + } else { + try (InputStream is = Files.newInputStream(keyStorePath)) { + return allBytes(is); + } + } + } + + private static byte[] allBytes(InputStream inputStream) throws Exception { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + int nRead; + byte[] data = new byte[1024]; + while ((nRead = inputStream.read(data, 0, data.length)) != -1) { + buffer.write(data, 0, nRead); + } + buffer.flush(); + return buffer.toByteArray(); + } + + @Override + public Uni getValue(String key) { + HttpRequest request = webClient + .get(consulConfig.agent.hostPort.getPort(), consulConfig.agent.hostPort.getHostString(), "/v1/kv/" + key) + .ssl(consulConfig.agent.useHttps) + .putHeader("Accept", "application/json;charset=UTF-8"); + if (consulConfig.agent.token.isPresent()) { + request.putHeader("Authorization", "Bearer " + consulConfig.agent.token.get()); + } + + log.debug("Attempting to look up value of key '" + key + "' from Consul."); + return request.send().map(r -> { + if (r.statusCode() != 200) { + log.debug("Look up of key '" + key + "' from Consul yielded a non success HTTP error-code: " + r.statusCode()); + return null; + } else { + JsonArray jsonArray = r.bodyAsJsonArray(); + if (jsonArray.size() != 1) { + throw new IllegalStateException( + "Consul returned an unexpected number of results when looking up value of key '" + key + "'"); + } + JsonObject jsonObject = jsonArray.getJsonObject(0); + return new Response(jsonObject.getString("Key"), jsonObject.getString("Value")); + } + }); + } + + @Override + public void close() { + this.webClient.close(); + this.vertx.closeAndAwait(); + } +} diff --git a/extensions/consul-config/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/consul-config/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 3bad0aac55319..f3d83e1724b11 100644 --- a/extensions/consul-config/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/consul-config/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -7,5 +7,5 @@ metadata: - "configuration" - "consul" categories: - - "configuration" + - "core" status: "preview" diff --git a/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProviderTest.java b/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProviderTest.java index 979cf3af256aa..fff9a6aea4906 100644 --- a/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProviderTest.java +++ b/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ConsulConfigSourceProviderTest.java @@ -1,6 +1,7 @@ package io.quarkus.consul.config.runtime; -import static io.quarkus.consul.config.runtime.ResponseUtil.createOptionalResponse; +import static io.quarkus.consul.config.runtime.ResponseUtil.emptyResponse; +import static io.quarkus.consul.config.runtime.ResponseUtil.validResponse; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.entry; @@ -12,6 +13,7 @@ import static org.mockito.Mockito.when; import java.io.IOException; +import java.time.Duration; import java.util.Arrays; import java.util.List; import java.util.Optional; @@ -45,9 +47,9 @@ void testWithMissingKeysAndFailureConfigured() throws IOException { ConsulConfigGateway mockGateway = mock(ConsulConfigGateway.class); // 
make sure the first is is properly resolved - when(mockGateway.getValue("some/first")).thenReturn(createOptionalResponse("some/first", "whatever")); + when(mockGateway.getValue("some/first")).thenReturn(validResponse("some/first", "whatever")); // make sure the second is not resolved - when(mockGateway.getValue("some/second")).thenReturn(Optional.empty()); + when(mockGateway.getValue("some/second")).thenReturn(emptyResponse()); ConsulConfigSourceProvider sut = new ConsulConfigSourceProvider(config, mockGateway); @@ -68,21 +70,21 @@ void testWithMissingKeysAndIgnoreFailureConfigured() throws IOException { ConsulConfigGateway mockGateway = mock(ConsulConfigGateway.class); // make sure the first is is properly resolved - when(mockGateway.getValue("some/first")).thenReturn(createOptionalResponse("some/first", "whatever")); + when(mockGateway.getValue("some/first")).thenReturn(validResponse("some/first", "whatever")); // make sure the second is not resolved - when(mockGateway.getValue("some/second")).thenReturn(Optional.empty()); + when(mockGateway.getValue("some/second")).thenReturn(emptyResponse()); // make sure the third is is properly resolved - when(mockGateway.getValue("some/third")).thenReturn(createOptionalResponse("some/third", "other")); + when(mockGateway.getValue("some/third")).thenReturn(validResponse("some/third", "other")); ConsulConfigSourceProvider sut = new ConsulConfigSourceProvider(config, mockGateway); Iterable configSources = sut.getConfigSources(null); assertThat(configSources).hasSize(2); - assertThat(configSources).filteredOn(c -> c.getName().contains("first")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("first")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("some.first", "whatever")); }); - assertThat(configSources).filteredOn(c -> c.getName().contains("third")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("third")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("some.third", "other")); }); @@ -100,19 +102,19 @@ void testRawKeysWithoutPrefix() throws IOException { ConsulConfigGateway mockGateway = mock(ConsulConfigGateway.class); when(mockGateway.getValue("greeting/message")) - .thenReturn(createOptionalResponse("greeting/message", "hello")); + .thenReturn(validResponse("greeting/message", "hello")); when(mockGateway.getValue("greeting/name")) - .thenReturn(createOptionalResponse("greeting/name", "quarkus")); + .thenReturn(validResponse("greeting/name", "quarkus")); ConsulConfigSourceProvider sut = new ConsulConfigSourceProvider(config, mockGateway); Iterable configSources = sut.getConfigSources(null); assertThat(configSources).hasSize(2); - assertThat(configSources).filteredOn(c -> c.getName().contains("message")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("message")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("greeting.message", "hello")); }); - assertThat(configSources).filteredOn(c -> c.getName().contains("name")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("name")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); 
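Besides the Optional-to-Uni switch, the assertions here move from AssertJ's `hasOnlyOneElementSatisfying` to the newer `singleElement().satisfies(...)` chain; the two are equivalent, as this standalone sketch with an invented list shows:

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.util.Arrays;
import java.util.List;

public class SingleElementSketch {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("first"); // invented data for the example

        // Old idiom, as removed by this patch:
        // assertThat(names).hasOnlyOneElementSatisfying(n -> assertThat(n).startsWith("f"));

        // New idiom, as introduced by this patch:
        assertThat(names).singleElement().satisfies(n -> assertThat(n).startsWith("f"));
    }
}
```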
assertThat(c.getProperties()).containsOnly(entry("greeting.name", "quarkus")); }); @@ -129,19 +131,19 @@ void testRawKeysWithPrefix() throws IOException { ConsulConfigGateway mockGateway = mock(ConsulConfigGateway.class); when(mockGateway.getValue("whatever/greeting/message")) - .thenReturn(createOptionalResponse("whatever/greeting/message", "hello")); + .thenReturn(validResponse("whatever/greeting/message", "hello")); when(mockGateway.getValue("whatever/greeting/name")) - .thenReturn(createOptionalResponse("whatever/greeting/name", "quarkus")); + .thenReturn(validResponse("whatever/greeting/name", "quarkus")); ConsulConfigSourceProvider sut = new ConsulConfigSourceProvider(config, mockGateway); Iterable configSources = sut.getConfigSources(null); assertThat(configSources).hasSize(2); - assertThat(configSources).filteredOn(c -> c.getName().contains("message")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("message")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("greeting.message", "hello")); }); - assertThat(configSources).filteredOn(c -> c.getName().contains("name")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("name")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("greeting.name", "quarkus")); }); @@ -157,20 +159,20 @@ void testPropertiesKeysWithoutPrefix() throws IOException { ConsulConfigGateway mockGateway = mock(ConsulConfigGateway.class); when(mockGateway.getValue("first")) - .thenReturn(createOptionalResponse("first", "greeting.message=hi\ngreeting.name=quarkus")); + .thenReturn(validResponse("first", "greeting.message=hi\ngreeting.name=quarkus")); when(mockGateway.getValue("second")) - .thenReturn(createOptionalResponse("second", "other.key=value")); + .thenReturn(validResponse("second", "other.key=value")); ConsulConfigSourceProvider sut = new ConsulConfigSourceProvider(config, mockGateway); Iterable configSources = sut.getConfigSources(null); assertThat(configSources).hasSize(2); - assertThat(configSources).filteredOn(c -> c.getName().contains("first")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("first")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("greeting.message", "hi"), entry("greeting.name", "quarkus")); }); - assertThat(configSources).filteredOn(c -> c.getName().contains("second")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("second")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("other.key", "value")); }); @@ -187,20 +189,20 @@ void testPropertiesKeysWithPrefix() throws IOException { ConsulConfigGateway mockGateway = mock(ConsulConfigGateway.class); when(mockGateway.getValue("config/first")) - .thenReturn(createOptionalResponse("config/first", "greeting.message=hi\ngreeting.name=quarkus")); + .thenReturn(validResponse("config/first", "greeting.message=hi\ngreeting.name=quarkus")); when(mockGateway.getValue("config/second")) - .thenReturn(createOptionalResponse("config/second", "other.key=value")); + .thenReturn(validResponse("config/second", "other.key=value")); ConsulConfigSourceProvider 
sut = new ConsulConfigSourceProvider(config, mockGateway); Iterable configSources = sut.getConfigSources(null); assertThat(configSources).hasSize(2); - assertThat(configSources).filteredOn(c -> c.getName().contains("first")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("first")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("greeting.message", "hi"), entry("greeting.name", "quarkus")); }); - assertThat(configSources).filteredOn(c -> c.getName().contains("second")).hasOnlyOneElementSatisfying(c -> { + assertThat(configSources).filteredOn(c -> c.getName().contains("second")).singleElement().satisfies(c -> { assertThat(c.getOrdinal()).isEqualTo(EXPECTED_ORDINAL); assertThat(c.getProperties()).containsOnly(entry("other.key", "value")); }); @@ -217,6 +219,8 @@ private ConsulConfig defaultConfig() { config.propertiesValueKeys = Optional.empty(); config.prefix = Optional.empty(); config.agent = new ConsulConfig.AgentConfig(); + config.agent.readTimeout = Duration.ofSeconds(10); + config.agent.connectionTimeout = Duration.ofSeconds(10); return config; } diff --git a/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ResponseUtil.java b/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ResponseUtil.java index 415b2b95fc2dd..08900bde568de 100644 --- a/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ResponseUtil.java +++ b/extensions/consul-config/runtime/src/test/java/io/quarkus/consul/config/runtime/ResponseUtil.java @@ -1,18 +1,19 @@ package io.quarkus.consul.config.runtime; import java.util.Base64; -import java.util.Optional; + +import io.smallrye.mutiny.Uni; final class ResponseUtil { private ResponseUtil() { } - static Response createResponse(String key, String rawValue) { - return new Response(key, Base64.getEncoder().encodeToString(rawValue.getBytes())); + static Uni validResponse(String key, String rawValue) { + return Uni.createFrom().item(new Response(key, Base64.getEncoder().encodeToString(rawValue.getBytes()))); } - static Optional createOptionalResponse(String key, String rawValue) { - return Optional.of(createResponse(key, rawValue)); + static Uni emptyResponse() { + return Uni.createFrom().nullItem(); } } diff --git a/extensions/container-image/container-image-docker/deployment/pom.xml b/extensions/container-image/container-image-docker/deployment/pom.xml index ba0177762c944..32e5e5f7394c5 100644 --- a/extensions/container-image/container-image-docker/deployment/pom.xml +++ b/extensions/container-image/container-image-docker/deployment/pom.xml @@ -11,7 +11,7 @@ quarkus-container-image-docker-deployment - Quarkus - Container - Image - Docker - Deployment + Quarkus - Container Image - Docker - Deployment diff --git a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java index df2b94c3378ca..7b7032c7f21ad 100644 --- a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java +++ b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java @@ -15,6 +15,6 @@ public class DockerBuild 
implements BooleanSupplier { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(DockerProcessor.DOCKER)).orElse(true); + return containerImageConfig.builder.map(b -> b.equals(DockerProcessor.DOCKER_CONTAINER_IMAGE_NAME)).orElse(true); } } diff --git a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerConfig.java b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerConfig.java index 7a7bedbce1200..eae7920acc860 100644 --- a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerConfig.java +++ b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerConfig.java @@ -40,4 +40,10 @@ public class DockerConfig { */ @ConfigItem public Optional> cacheFrom; + + /** + * Name of binary used to execute the docker commands. + */ + @ConfigItem(defaultValue = "docker") + public String executableName; } diff --git a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerProcessor.java b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerProcessor.java index 54277ac78d738..00773933ba4b9 100644 --- a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerProcessor.java +++ b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerProcessor.java @@ -24,15 +24,17 @@ import io.quarkus.container.image.deployment.ContainerImageConfig; import io.quarkus.container.image.deployment.util.NativeBinaryUtil; +import io.quarkus.container.spi.AvailableContainerImageExtensionBuildItem; import io.quarkus.container.spi.ContainerImageBuildRequestBuildItem; import io.quarkus.container.spi.ContainerImageInfoBuildItem; import io.quarkus.container.spi.ContainerImagePushRequestBuildItem; import io.quarkus.deployment.Capability; -import io.quarkus.deployment.IsNormal; +import io.quarkus.deployment.IsNormalNotRemoteDev; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.CapabilityBuildItem; import io.quarkus.deployment.pkg.PackageConfig; +import io.quarkus.deployment.pkg.builditem.AppCDSResultBuildItem; import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; import io.quarkus.deployment.pkg.builditem.JarBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageBuildItem; @@ -43,29 +45,36 @@ public class DockerProcessor { private static final Logger log = Logger.getLogger(DockerProcessor.class); + private static final String DOCKER = "docker"; private static final String DOCKERFILE_JVM = "Dockerfile.jvm"; - private static final String DOCKERFILE_FAST_JAR = "Dockerfile.fast-jar"; + private static final String DOCKERFILE_LEGACY_JAR = "Dockerfile.legacy-jar"; private static final String DOCKERFILE_NATIVE = "Dockerfile.native"; - public static final String DOCKER_BINARY_NAME = "docker"; - public static final String DOCKER = "docker"; + private static final String DOCKER_DIRECTORY_NAME = "docker"; + static final String DOCKER_CONTAINER_IMAGE_NAME = "docker"; private final DockerWorking dockerWorking = new 
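The new `executableName` config item feeds every docker invocation this processor makes (build, tag, login, push). Assuming this config root is exposed under the usual `quarkus.docker.*` prefix, pointing the whole flow at a docker-compatible CLI such as podman would be a one-line setting (hypothetical values):

```properties
# Hypothetical example; assumes DockerConfig maps to the quarkus.docker.* prefix.
quarkus.docker.executable-name=podman
quarkus.container-image.build=true
```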
DockerWorking(); + @BuildStep + public AvailableContainerImageExtensionBuildItem availability() { + return new AvailableContainerImageExtensionBuildItem(DOCKER); + } + @BuildStep(onlyIf = DockerBuild.class) public CapabilityBuildItem capability() { return new CapabilityBuildItem(Capability.CONTAINER_IMAGE_DOCKER); } - @BuildStep(onlyIf = { IsNormal.class, DockerBuild.class }, onlyIfNot = NativeBuild.class) + @BuildStep(onlyIf = { IsNormalNotRemoteDev.class, DockerBuild.class }, onlyIfNot = NativeBuild.class) public void dockerBuildFromJar(DockerConfig dockerConfig, - ContainerImageConfig containerImageConfig, // TODO: use to check whether we need to also push to registry + ContainerImageConfig containerImageConfig, OutputTargetBuildItem out, - ContainerImageInfoBuildItem containerImage, + ContainerImageInfoBuildItem containerImageInfo, Optional buildRequest, Optional pushRequest, + @SuppressWarnings("unused") Optional appCDSResult, // ensure docker build will be performed after AppCDS creation BuildProducer artifactResultProducer, PackageConfig packageConfig, - // used to ensure that the jar has been built + @SuppressWarnings("unused") // used to ensure that the jar has been built JarBuildItem jar) { if (!containerImageConfig.build && !containerImageConfig.push && !buildRequest.isPresent() @@ -79,17 +88,16 @@ public void dockerBuildFromJar(DockerConfig dockerConfig, log.info("Building docker image for jar."); - String image = containerImage.getImage(); - List additionalImageTags = containerImage.getAdditionalImageTags(); - ImageIdReader reader = new ImageIdReader(); - createContainerImage(containerImageConfig, dockerConfig, image, additionalImageTags, out, reader, false, + String builtContainerImage = createContainerImage(containerImageConfig, dockerConfig, containerImageInfo, out, reader, + false, pushRequest.isPresent(), packageConfig); - artifactResultProducer.produce(new ArtifactResultBuildItem(null, "jar-container", Collections.emptyMap())); + artifactResultProducer.produce(new ArtifactResultBuildItem(null, "jar-container", + Collections.singletonMap("container-image", builtContainerImage))); } - @BuildStep(onlyIf = { IsNormal.class, NativeBuild.class, DockerBuild.class }) + @BuildStep(onlyIf = { IsNormalNotRemoteDev.class, NativeBuild.class, DockerBuild.class }) public void dockerBuildFromNativeImage(DockerConfig dockerConfig, ContainerImageConfig containerImageConfig, ContainerImageInfoBuildItem containerImage, @@ -117,57 +125,59 @@ public void dockerBuildFromNativeImage(DockerConfig dockerConfig, log.info("Starting docker image build"); - String image = containerImage.getImage(); - List additionalImageTags = containerImage.getAdditionalImageTags(); - ImageIdReader reader = new ImageIdReader(); - createContainerImage(containerImageConfig, dockerConfig, image, additionalImageTags, out, reader, true, + String builtContainerImage = createContainerImage(containerImageConfig, dockerConfig, containerImage, out, reader, true, pushRequest.isPresent(), packageConfig); - artifactResultProducer.produce(new ArtifactResultBuildItem(null, "native-container", Collections.emptyMap())); + artifactResultProducer.produce(new ArtifactResultBuildItem(null, "native-container", + Collections.singletonMap("container-image", builtContainerImage))); } - private void createContainerImage(ContainerImageConfig containerImageConfig, DockerConfig dockerConfig, String image, - List additionalImageTags, + private String createContainerImage(ContainerImageConfig containerImageConfig, DockerConfig dockerConfig, + 
ContainerImageInfoBuildItem containerImageInfo, OutputTargetBuildItem out, ImageIdReader reader, boolean forNative, boolean pushRequested, PackageConfig packageConfig) { DockerfilePaths dockerfilePaths = getDockerfilePaths(dockerConfig, forNative, packageConfig, out); - String[] dockerArgs = getDockerArgs(image, dockerfilePaths, dockerConfig); - log.infof("Executing the following command to build docker image: '%s %s'", DOCKER_BINARY_NAME, + String[] dockerArgs = getDockerArgs(containerImageInfo.getImage(), dockerfilePaths, dockerConfig); + log.infof("Executing the following command to build docker image: '%s %s'", dockerConfig.executableName, String.join(" ", dockerArgs)); - boolean buildSuccessful = ExecUtil.exec(out.getOutputDirectory().toFile(), reader, DOCKER_BINARY_NAME, dockerArgs); + boolean buildSuccessful = ExecUtil.exec(out.getOutputDirectory().toFile(), reader, dockerConfig.executableName, + dockerArgs); if (!buildSuccessful) { throw dockerException(dockerArgs); } - log.infof("Built container image %s (%s)\n", image, reader.getImageId()); + log.infof("Built container image %s (%s)\n", containerImageInfo.getImage(), reader.getImageId()); - if (!additionalImageTags.isEmpty()) { - createAdditionalTags(image, additionalImageTags); + if (!containerImageInfo.getAdditionalImageTags().isEmpty()) { + createAdditionalTags(containerImageInfo.getImage(), containerImageInfo.getAdditionalImageTags(), dockerConfig); } if (pushRequested || containerImageConfig.push) { String registry = "docker.io"; - if (!containerImageConfig.registry.isPresent()) { + if (!containerImageInfo.getRegistry().isPresent()) { log.info("No container image registry was set, so 'docker.io' will be used"); } else { - registry = containerImageConfig.registry.get(); + registry = containerImageInfo.getRegistry().get(); } // Check if we need to login first if (containerImageConfig.username.isPresent() && containerImageConfig.password.isPresent()) { - boolean loginSuccessful = ExecUtil.exec("docker", "login", registry, "-u", containerImageConfig.username.get(), + boolean loginSuccessful = ExecUtil.exec(dockerConfig.executableName, "login", registry, "-u", + containerImageConfig.username.get(), "-p" + containerImageConfig.password.get()); if (!loginSuccessful) { throw dockerException(new String[] { "-u", containerImageConfig.username.get(), "-p", "********" }); } } - List imagesToPush = new ArrayList<>(additionalImageTags); - imagesToPush.add(image); + List imagesToPush = new ArrayList<>(containerImageInfo.getAdditionalImageTags()); + imagesToPush.add(containerImageInfo.getImage()); for (String imageToPush : imagesToPush) { - pushImage(imageToPush); + pushImage(imageToPush, dockerConfig); } } + + return containerImageInfo.getImage(); } private String[] getDockerArgs(String image, DockerfilePaths dockerfilePaths, DockerConfig dockerConfig) { @@ -188,19 +198,19 @@ private String[] getDockerArgs(String image, DockerfilePaths dockerfilePaths, Do return dockerArgs.toArray(new String[0]); } - private void createAdditionalTags(String image, List additionalImageTags) { + private void createAdditionalTags(String image, List additionalImageTags, DockerConfig dockerConfig) { for (String additionalTag : additionalImageTags) { String[] tagArgs = { "tag", image, additionalTag }; - boolean tagSuccessful = ExecUtil.exec("docker", tagArgs); + boolean tagSuccessful = ExecUtil.exec(dockerConfig.executableName, tagArgs); if (!tagSuccessful) { throw dockerException(tagArgs); } } } - private void pushImage(String image) { + private void 
         String[] pushArgs = { "push", image };
-        boolean pushSuccessful = ExecUtil.exec("docker", pushArgs);
+        boolean pushSuccessful = ExecUtil.exec(dockerConfig.executableName, pushArgs);
         if (!pushSuccessful) {
             throw dockerException(pushArgs);
         }
@@ -225,9 +235,8 @@ private DockerfilePaths getDockerfilePaths(DockerConfig dockerConfig, boolean fo
         } else {
             if (dockerConfig.dockerfileJvmPath.isPresent()) {
                 return ProvidedDockerfile.get(Paths.get(dockerConfig.dockerfileJvmPath.get()), outputDirectory);
-            } else if (packageConfig.type.equals(PackageConfig.FAST_JAR)
-                    || packageConfig.type.equalsIgnoreCase(PackageConfig.MUTABLE_JAR)) {
-                return DockerfileDetectionResult.detect(DOCKERFILE_FAST_JAR, outputDirectory);
+            } else if (packageConfig.type.equals(PackageConfig.LEGACY_JAR)) {
+                return DockerfileDetectionResult.detect(DOCKERFILE_LEGACY_JAR, outputDirectory);
             } else {
                 return DockerfileDetectionResult.detect(DOCKERFILE_JVM, outputDirectory);
             }
@@ -312,7 +321,7 @@ private static Map.Entry findDockerfileRoot(Path outputDirectory) {
         if (mainSourcesRoot == null) {
             return null;
         }
-        Path dockerfilesRoot = mainSourcesRoot.getKey().resolve("docker");
+        Path dockerfilesRoot = mainSourcesRoot.getKey().resolve(DOCKER_DIRECTORY_NAME);
         if (!dockerfilesRoot.toFile().exists()) {
             return null;
         }
@@ -337,7 +346,7 @@ public static ProvidedDockerfile get(Path dockerfilePath, Path outputDirectory)
             }
             Path effectiveDockerfilePath = dockerfilePath.isAbsolute() ? dockerfilePath
                     : mainSourcesRoot.getValue().resolve(dockerfilePath);
-            if (!dockerfilePath.toFile().exists()) {
+            if (!effectiveDockerfilePath.toFile().exists()) {
                 throw new IllegalArgumentException(
                         "Specified Dockerfile path " + effectiveDockerfilePath.toAbsolutePath().toString() + " does not exist");
             }
diff --git a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerWorking.java b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerWorking.java
index 395f121de8829..a396ae16e191e 100644
--- a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerWorking.java
+++ b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerWorking.java
@@ -25,7 +25,7 @@ public boolean getAsBoolean() {
                 return false;
             }
         } catch (Exception e) {
-            LOGGER.warn("No Docker binary found");
+            LOGGER.warnf("No Docker binary found or general error: %s", e);
             return false;
         }
diff --git a/extensions/container-image/container-image-docker/pom.xml b/extensions/container-image/container-image-docker/pom.xml
index afe09d8500ce7..9e778d91943f8 100644
--- a/extensions/container-image/container-image-docker/pom.xml
+++ b/extensions/container-image/container-image-docker/pom.xml
@@ -10,7 +10,7 @@
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>quarkus-container-image-docker-parent</artifactId>
-    <name>Quarkus - Container - Image - Docker - Parent</name>
+    <name>Quarkus - Container Image - Docker - Parent</name>
     <packaging>pom</packaging>
     <modules>
         <module>deployment</module>
diff --git a/extensions/container-image/container-image-docker/runtime/pom.xml b/extensions/container-image/container-image-docker/runtime/pom.xml
index 20fb4dedcc4d6..53c73e0d5ef5e 100644
--- a/extensions/container-image/container-image-docker/runtime/pom.xml
+++ b/extensions/container-image/container-image-docker/runtime/pom.xml
@@ -11,9 +11,15 @@
     <artifactId>quarkus-container-image-docker</artifactId>
-    <name>Quarkus - Container - Image - Docker</name>
+    <name>Quarkus - Container Image - Docker</name>
     <description>Build container images of your application using Docker</description>
+    <dependencies>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-container-image</artifactId>
+        </dependency>
+    </dependencies>
diff --git a/extensions/container-image/container-image-jib/deployment/pom.xml b/extensions/container-image/container-image-jib/deployment/pom.xml
index 866a74c77eb54..d595ade0ff69a 100644
--- a/extensions/container-image/container-image-jib/deployment/pom.xml
+++ b/extensions/container-image/container-image-jib/deployment/pom.xml
@@ -11,7 +11,7 @@
     <artifactId>quarkus-container-image-jib-deployment</artifactId>
-    <name>Quarkus - Container - Image - Jib - Deployment</name>
+    <name>Quarkus - Container Image - Jib - Deployment</name>
diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerBuilderHelper.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerBuilderHelper.java
index 2769f1abc90c1..58cf61c7a4eaa 100644
--- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerBuilderHelper.java
+++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerBuilderHelper.java
@@ -5,7 +5,7 @@
  * use this file except in compliance with the License. You may obtain a copy of
  * the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
@@ -84,8 +84,8 @@ public static FileEntriesLayer extraDirectoryLayerConfiguration(
             // Add with default permissions
             if (localPath.toFile().canExecute()) {
-                // make sure we can execute the file in the container
-                builder.addEntry(localPath, pathOnContainer, FilePermissions.fromOctalString("754"),
+                // make sure the file or directory can be executed
+                builder.addEntry(localPath, pathOnContainer, FilePermissions.fromOctalString("755"),
                         modificationTime);
             } else {
                 builder.addEntry(localPath, pathOnContainer, modificationTime);
diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibConfig.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibConfig.java
index 11f280b2e4f4a..79a794daa304c 100644
--- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibConfig.java
+++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibConfig.java
@@ -3,6 +3,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 
 import io.quarkus.runtime.annotations.ConfigItem;
 import io.quarkus.runtime.annotations.ConfigPhase;
@@ -26,14 +27,14 @@ public class JibConfig {
     /**
      * Additional JVM arguments to pass to the JVM when starting the application
      */
-    @ConfigItem(defaultValue = "-Dquarkus.http.host=0.0.0.0,-Djava.util.logging.manager=org.jboss.logmanager.LogManager")
+    @ConfigItem(defaultValue = "-Djava.util.logging.manager=org.jboss.logmanager.LogManager")
     public List<String> jvmArguments;
 
     /**
      * Additional arguments to pass when starting the native application
      */
-    @ConfigItem(defaultValue = "-Dquarkus.http.host=0.0.0.0")
-    public List<String> nativeArguments;
+    @ConfigItem
+    public Optional<List<String>> nativeArguments;
 
     /**
      * If this is set, then it will be used as the entry point of the container image.
@@ -41,9 +42,10 @@ public class JibConfig {
      *