
Merge branch 'master' into pr/22373
JackPGreen committed Oct 17, 2023
2 parents 217b161 + 35dc152 commit 31e48d5
Showing 766 changed files with 9,719 additions and 9,685 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/assign-to-test-failure.yml
@@ -41,7 +41,7 @@ jobs:
return checkoutRef
- name: Checkout
-uses: actions/checkout@v2
+uses: actions/checkout@v4
continue-on-error: true
with:
fetch-depth: 0
4 changes: 2 additions & 2 deletions .github/workflows/aws-terraform-integration-tests.yml
@@ -40,7 +40,7 @@ jobs:
suite: ['ec2_cl_to_ec2_m', 'ec2_cl_to_fargate_ecs_m', 'fargate_ecs_cl_to_ec2_m', 'fargate_ecs_cl_to_fargate_ecs_m']
steps:
- name: Configure AWS Credentials
-uses: aws-actions/configure-aws-credentials@v3.0.0
+uses: aws-actions/configure-aws-credentials@v4.0.1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -53,7 +53,7 @@ jobs:
DEVOPS_GITHUB_TOKEN,CN/DEVOPS_GITHUB_TOKEN
- name: Checkout Auto Discovery Test Suite
-uses: actions/checkout@v3
+uses: actions/checkout@v4
with:
repository: hazelcast/auto-discovery-test-suite
token: ${{ env.DEVOPS_GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/azure-terraform-integration-tests.yml
@@ -50,7 +50,7 @@ jobs:
fi
- name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v4
with:
ref: ${{steps.decide-ref.outputs.ref}}

8 changes: 4 additions & 4 deletions .github/workflows/build-artifact.yml
@@ -27,7 +27,7 @@ jobs:
CL_IMG: ${{ steps.get-cl-image-tag.outputs.CL_IMG }}
steps:
- name: Configure AWS Credentials
-uses: aws-actions/configure-aws-credentials@v3.0.0
+uses: aws-actions/configure-aws-credentials@v4.0.1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -41,7 +41,7 @@ jobs:
GCP_SA_KEY,CN/GKE_SA_KEY
- name: Checkout Auto Discovery Test Suite
-uses: actions/checkout@v3
+uses: actions/checkout@v4
with:
repository: hazelcast/auto-discovery-test-suite
token: ${{ env.DEVOPS_GITHUB_TOKEN }}
@@ -57,7 +57,7 @@ jobs:
fi
- name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v4
with:
ref: ${{steps.decide-ref.outputs.ref}}
path: ${{steps.decide-ref.outputs.ref}}
@@ -119,7 +119,7 @@ jobs:
path: aws-discovery-suite/terraform/tools/client/aws-discovery-client.jar

- name: Authenticate to GAR
-uses: docker/login-action@v2
+uses: docker/login-action@v3
with:
registry: us-east1-docker.pkg.dev
username: _json_key
2 changes: 1 addition & 1 deletion .github/workflows/codeql-analysis.yml
@@ -32,7 +32,7 @@ jobs:

steps:
- name: Checkout repository
-uses: actions/checkout@v3.3.0
+uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.reference }}

2 changes: 1 addition & 1 deletion .github/workflows/gcp-terraform-integration-tests.yml
@@ -47,7 +47,7 @@ jobs:
fi
- name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v4
with:
ref: ${{steps.decide-ref.outputs.ref}}

9 changes: 7 additions & 2 deletions .github/workflows/k8s-terraform-integration-tests.yml
@@ -41,7 +41,7 @@ jobs:
suite: ['gke']
steps:
- name: Configure AWS Credentials
-uses: aws-actions/configure-aws-credentials@v3.0.0
+uses: aws-actions/configure-aws-credentials@v4.0.1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -71,7 +71,7 @@ jobs:
kubectl config view > ${HOME}/.kube/config
- name: Checkout Auto Discovery Test Suite
-uses: actions/checkout@v3
+uses: actions/checkout@v4
with:
repository: hazelcast/auto-discovery-test-suite
token: ${{ env.DEVOPS_GITHUB_TOKEN }}
@@ -93,6 +93,11 @@ jobs:
restore-keys: |
${{ runner.os }}-go-
+- name: Add env vars
+  run: |
+    echo "GCLOUD_SA=$(gcloud auth list --filter=status:ACTIVE --format="value(account)")" >> "$GITHUB_ENV"
+    echo "GCLOUD_ID_TOKEN=$(gcloud auth print-identity-token)" >> "$GITHUB_ENV"
- name: Run Tests
working-directory: auto-discovery-suite/terraform/test
run: |
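
As an aside on the new `Add env vars` step above: appending `KEY=value` lines to `$GITHUB_ENV` makes those values available as ordinary environment variables in subsequent steps of the job. A minimal sketch of a consumer follows; only the variable names `GCLOUD_SA` and `GCLOUD_ID_TOKEN` come from the step itself — the class name and its usage are hypothetical, and the actual suite under `auto-discovery-suite/terraform/test` consumes these variables in its own way.

```java
// Hypothetical consumer of the variables exported by the "Add env vars" step.
// GCLOUD_SA and GCLOUD_ID_TOKEN are the names written to GITHUB_ENV above;
// the class and its usage are illustrative only.
public final class GcloudEnvSketch {

    public static void main(String[] args) {
        String serviceAccount = System.getenv("GCLOUD_SA");
        String idToken = System.getenv("GCLOUD_ID_TOKEN");
        if (serviceAccount == null || idToken == null) {
            throw new IllegalStateException(
                    "GCLOUD_SA/GCLOUD_ID_TOKEN not set; run the workflow step first");
        }
        // The token would typically be attached as a Bearer credential, e.g. in an
        // Authorization header on requests to services running in the GKE cluster.
        System.out.println("Active gcloud account: " + serviceAccount);
    }
}
```
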
1 change: 1 addition & 0 deletions .gitignore
@@ -2,6 +2,7 @@ target/
.clover/
.idea/
!.idea/icon.png
+!.idea/icon.svg
.settings/
.attach_pid*
.classpath
Binary file removed .idea/icon.png
Binary file not shown.
1 change: 1 addition & 0 deletions .idea/icon.svg
File not shown.
6 changes: 3 additions & 3 deletions README.md
@@ -2,7 +2,7 @@

[![Slack](https://img.shields.io/badge/slack-chat-green.svg)](https://slack.hazelcast.com/)
[![javadoc](https://javadoc.io/badge2/com.hazelcast/hazelcast/latest/javadoc.svg)](https://javadoc.io/doc/com.hazelcast/hazelcast/latest)
-[![Docker pulls](https://img.shields.io/docker/pulls/hazelcast/hazelcast)](https://img.shields.io/docker/pulls/hazelcast/hazelcast)
+[![Docker pulls](https://img.shields.io/docker/pulls/hazelcast/hazelcast)](https://hub.docker.com/r/hazelcast/hazelcast)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=hazelcast_hazelcast&metric=alert_status)](https://sonarcloud.io/dashboard?id=hazelcast_hazelcast)

----
@@ -354,7 +354,7 @@ see used in the comments on your PR:
* *`run-nightly-tests` - run nightly (slow) tests. WARNING: Use with care as this is a resource consuming task.*
* *`run-ee-nightly-tests` - run nightly (slow) tests from hazelcast-enterprise. WARNING: Use with care as this is a resource consuming task.*
* `run-sql-only` - run default tests in `hazelcast-sql`, `hazelcast-distribution`, and `extensions/mapstore` modules
-* `run-docs-only` - do not run any tests, check that only files with `.md` or `.adoc` suffix are added in the PR
+* `run-docs-only` - do not run any tests, check that only files with `.md`, `.adoc` or `.txt` suffix are added in the PR
* `run-sonar` - run SonarCloud analysis
* `run-arm64` - run the tests on arm64 machine

@@ -372,7 +372,7 @@ anywhere other than `hazelcast-sql`.

When creating a PR which changes only documentation (files with suffix `.md` or `.adoc`) it
makes no sense to run tests. For that case the label `docs-only` can be used. The job will fail
-in case you've made other changes than in `.md` or `.adoc` files.
+in case you've made other changes than in `.md`, `.adoc` or `.txt` files.

## License

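
For illustration, the `run-docs-only`/`docs-only` behavior described in the README excerpt above reduces to a suffix allow-list over the PR's changed files. A minimal sketch, assuming hypothetical names (`DocsOnlyCheckSketch`, `isDocsOnly`) rather than the actual CI implementation:

```java
import java.util.List;
import java.util.Set;

// Illustrative sketch of the docs-only rule: every changed file must end
// with .md, .adoc or .txt, otherwise the job fails.
public final class DocsOnlyCheckSketch {

    private static final Set<String> ALLOWED_SUFFIXES = Set.of(".md", ".adoc", ".txt");

    static boolean isDocsOnly(List<String> changedFiles) {
        return changedFiles.stream().allMatch(
                file -> ALLOWED_SUFFIXES.stream().anyMatch(file::endsWith));
    }

    public static void main(String[] args) {
        // The second list contains a .java file, so it is not docs-only.
        System.out.println(isDocsOnly(List.of("README.md", "docs/guide.adoc"))); // true
        System.out.println(isDocsOnly(List.of("README.md", "src/Main.java")));   // false
    }
}
```
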
3 changes: 0 additions & 3 deletions distribution/pom.xml
@@ -202,17 +202,14 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
-<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-layout-template-json</artifactId>
-<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
</dependency>

<!-- Prometheus to expose JMX metrics as HTTP endpoint -->
6 changes: 4 additions & 2 deletions docs/design/alto/client-interaction.md
@@ -16,7 +16,8 @@ Clients will continue to use the classic Hazelcast ports for the
initial authentication.

If the TPC is enabled on the client-side, clients will make use of the two
-newly added codecs to authenticate with the cluster.
+newly added codecs to authenticate with the cluster. (note: these codecs were removed later on and
+`tpcPorts` and `tpcToken` fields were added to the stable codecs. This was done as part of making Alto stable)

- `ExperimentalAuthenticationCodec`
- Counterpart of `ClientAuthenticationCodec`
@@ -67,7 +68,8 @@ classic ports.
First, 3-bytes long protocol identifier is sent. (bytearray representation of `CP2`).

Then, the client authenticates to the TPC event loop using the new
-`ExperimentalTpcAuthenticationCodec`, which has the following definition.
+`ExperimentalTpcAuthenticationCodec`, which has the following definition. (note: this codec was later removed and
+replaced by the new `ClientTpcAuthenticationCodec` when the TPC was promoted to stable status.)

```yaml
- id: 3
# (remainder of the codec definition is not shown in this view)
```
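
To make the handshake described in this design doc concrete, here is a minimal client-side sketch. Only the 3-byte `CP2` protocol prefix and the ordering of the steps come from the document above; the class name, the socket-level details, and the commented-out authentication call are illustrative assumptions:

```java
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

// Hypothetical sketch of connecting to a TPC event loop as described above:
// send the "CP2" protocol bytes first, then authenticate on that channel.
public final class TpcHandshakeSketch {

    public static Socket connectToTpcPort(String host, int tpcPort) throws IOException {
        Socket socket = new Socket();
        socket.connect(new InetSocketAddress(host, tpcPort));

        // Step 1: the 3-byte protocol identifier, the byte-array form of "CP2".
        OutputStream out = socket.getOutputStream();
        out.write("CP2".getBytes(StandardCharsets.US_ASCII));
        out.flush();

        // Step 2: authenticate this channel. In the design above this was the
        // ExperimentalTpcAuthenticationCodec request (later replaced by
        // ClientTpcAuthenticationCodec), carrying the token obtained during
        // authentication on the classic port.
        // sendTpcAuthentication(socket, tpcToken);  // illustrative placeholder
        return socket;
    }
}
```
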
2 changes: 1 addition & 1 deletion docs/design/jet/025-jet-job-partition-pruning.md
@@ -38,7 +38,7 @@ Non-goals are:
- support any kind of pruning for streaming jobs. It may be considered to do later, but now it is not a case.
- special support migration-tolerance for member and processor pruning. If the migration happens when the job is starting,
it will be running suboptimally, because it may fetch data from other members - same behavior as we have currently.
-- support local index scan. Index scan is not supported in pure Jet, only SQL has dedicated processor for that.
+- support partitioned index scan. Index scan is not supported in pure Jet, only SQL has dedicated processor for that.

## Technical Design

1 change: 0 additions & 1 deletion extensions/cdc-debezium/pom.xml
@@ -139,7 +139,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
1 change: 0 additions & 1 deletion extensions/cdc-mysql/pom.xml
@@ -127,7 +127,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
1 change: 0 additions & 1 deletion extensions/cdc-postgres/pom.xml
@@ -118,7 +118,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>

1 change: 0 additions & 1 deletion extensions/csv/pom.xml
@@ -94,7 +94,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
2 changes: 1 addition & 1 deletion extensions/elasticsearch/elasticsearch-7/pom.xml
@@ -112,7 +112,7 @@
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
-<version>7.17.13</version>
+<version>7.17.14</version>
</dependency>

<!-- TEST -->
1 change: 0 additions & 1 deletion extensions/grpc/pom.xml
@@ -100,7 +100,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
1 change: 0 additions & 1 deletion extensions/hadoop/pom.xml
@@ -113,7 +113,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
8 changes: 3 additions & 5 deletions extensions/kafka-connect/pom.xml
@@ -64,7 +64,7 @@
<plugin>
<groupId>com.googlecode.maven-download-plugin</groupId>
<artifactId>download-maven-plugin</artifactId>
-<version>1.6.8</version>
+<version>1.7.1</version>
<executions>
<execution>
<id>download-zip-file</id>
@@ -74,7 +74,7 @@
<configuration>
<url>https://repository.hazelcast.com/download/tests/confluentinc-kafka-connect-datagen-0.6.0.zip</url>
<unpack>false</unpack>
-<outputDirectory>${project.build.directory}/classes</outputDirectory>
+<outputDirectory>${project.build.directory}/test-classes</outputDirectory>
</configuration>
</execution>
</executions>
@@ -92,13 +92,11 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -122,7 +120,7 @@
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>java-client</artifactId>
-<version>3.4.10</version>
+<version>3.4.11</version>
<scope>test</scope>
</dependency>
<dependency>
1 change: 0 additions & 1 deletion extensions/kafka/pom.xml
@@ -101,7 +101,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
1 change: 0 additions & 1 deletion extensions/kinesis/pom.xml
@@ -90,7 +90,6 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
-<version>${log4j2.version}</version>
<scope>test</scope>
</dependency>

@@ -262,7 +262,7 @@ private List<SqlColumnMetadata> resolveMappingColumns() {
}

private void readExistingMapping() {
logger.fine("Reading existing mapping for map" + mapName);
logger.fine("Reading existing mapping for map " + mapName);
try {
// If mappingName does not exist, we get "... did you forget to CREATE MAPPING?" exception
columnMetadataList = mappingHelper.loadColumnMetadataFromMapping(mappingName);
@@ -395,7 +395,7 @@ public void testDestroy() {
/**
* Regression test for https://github.com/hazelcast/hazelcast/issues/22567
*/
-@Test(timeout = 90_000L)
+@Test(timeout = 180_000L)
@Category(NightlyTest.class)
public void testClear() {
HazelcastInstance client = client();
@@ -422,7 +422,10 @@ public void testInstanceShutdown() throws Exception {
// create another member
logger.info("Starting third member instance");
HazelcastInstance hz3 = factory().newHazelcastInstance(memberConfig);
-assertClusterSizeEventually(3, hz3);
+
+HazelcastInstance[] instances = new HazelcastInstance[]{instances()[0], instances()[1], hz3};
+assertClusterSizeEventually(3, instances);
+waitAllForSafeState(instances);
logger.info("Third member instance started with name " + hz3.getName());

ExceptionRecorder recorder = new ExceptionRecorder(hz3, Level.WARNING);
@@ -432,6 +435,8 @@
for (int i = 1; i < itemSize; i++) {
map.put(i, new Person(i, "name-" + i));
}
logger.info("Putting data into the IMap finished");

// Ensure that all put operations are inserted into the DB. Otherwise, we may get
// HazelcastSqlException: Hazelcast instance is not active! from the SqlService
// when we shut down the hz3 instance
@@ -460,7 +465,8 @@ public void testInstanceShutdown() throws Exception {
assertThat(p.getName()).isEqualTo("name-" + itemSize);

for (Throwable throwable : recorder.exceptionsLogged()) {
assertThat(throwable).hasMessageNotContaining("is not active!");
assertThat(throwable).hasMessageNotContaining("HazelcastSqlException: The Jet SQL job failed: "
+ "Hazelcast instance is not active!");
}
}
}
@@ -30,7 +30,6 @@
import com.hazelcast.jet.pipeline.Sinks;
import com.hazelcast.jet.retry.RetryStrategies;
import com.hazelcast.jet.retry.RetryStrategy;
-import com.hazelcast.security.permission.ConnectorPermission;
import com.hazelcast.spi.annotation.Beta;
import com.mongodb.TransactionOptions;
import com.mongodb.client.MongoClient;
@@ -261,9 +260,7 @@ public Sink<T> build() {
final WriteMongoParams<T> localParams = this.params;
localParams.setCheckExistenceOnEachConnect(existenceChecks == ResourceChecks.ON_EACH_CONNECT);

-ConnectorPermission permission = params.buildPermission();
return Sinks.fromProcessor(name, new DbCheckingPMetaSupplierBuilder()
-        .withRequiredPermission(permission)
.withCheckResourceExistence(localParams.isCheckExistenceOnEachConnect())
.withForceTotalParallelismOne(false)
.withDatabaseName(localParams.getDatabaseName())
