+
+
+Importing AWS Module In Maven Project
+-------------------------------------
+
+If you are using Maven to manage the dependencies of your project, you can add the AWS
+module dependency like this (replace '${ignite-aws2-ext.version}' with the actual extension
+version you are interested in):
+
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+    ...
+    <dependencies>
+        ...
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-aws2-ext</artifactId>
+            <version>${ignite-aws2-ext.version}</version>
+        </dependency>
+        ...
+    </dependencies>
+    ...
+</project>
+
diff --git a/modules/aws2-ext/RELEASE_NOTES.txt b/modules/aws2-ext/RELEASE_NOTES.txt
new file mode 100644
index 000000000..0d8564f60
--- /dev/null
+++ b/modules/aws2-ext/RELEASE_NOTES.txt
@@ -0,0 +1,7 @@
+Apache Ignite Extensions Release Notes
+======================================
+
+Apache Ignite AWS Module 1.0.0
+------------------------------
+
+* Initial release of the Apache Ignite AWS module that provides S3-based implementations of checkpoint SPI and IP finder for TCP discovery.
diff --git a/modules/aws2-ext/assembly/aws-ext.xml b/modules/aws2-ext/assembly/aws-ext.xml
new file mode 100644
index 000000000..0c28f4c44
--- /dev/null
+++ b/modules/aws2-ext/assembly/aws-ext.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">
+    <id>bin</id>
+    <includeBaseDirectory>false</includeBaseDirectory>
+
+    <formats>
+        <format>zip</format>
+    </formats>
+
+    <componentDescriptors>
+        <componentDescriptor>../../assembly/bin-component-shared.xml</componentDescriptor>
+    </componentDescriptors>
+</assembly>
diff --git a/modules/aws2-ext/licenses/apache-2.0.txt b/modules/aws2-ext/licenses/apache-2.0.txt
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/modules/aws2-ext/licenses/apache-2.0.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/modules/aws2-ext/pom.xml b/modules/aws2-ext/pom.xml
new file mode 100644
index 000000000..b803011a1
--- /dev/null
+++ b/modules/aws2-ext/pom.xml
@@ -0,0 +1,243 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.ignite</groupId>
+        <artifactId>ignite-parent-ext-internal</artifactId>
+        <version>1</version>
+        <relativePath>../../parent-internal/pom.xml</relativePath>
+    </parent>
+
+    <artifactId>ignite-aws2-ext</artifactId>
+    <version>2.0.0</version>
+    <url>https://ignite.apache.org</url>
+
+    <properties>
+        <aws.sdk2.version>2.29.45</aws.sdk2.version>
+        <aws.encryption.sdk.version>3.0.0</aws.encryption.sdk.version>
+        <bouncycastle.version>1.78.1</bouncycastle.version>
+        <httpclient.version>4.5.14</httpclient.version>
+    </properties>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-framework-bom</artifactId>
+                <version>${spring.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+
+            <dependency>
+                <groupId>software.amazon.awssdk</groupId>
+                <artifactId>bom</artifactId>
+                <version>${aws.sdk2.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>aws-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>s3</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>ec2</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>elasticloadbalancing</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>elasticloadbalancingv2</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>kms</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-encryption-sdk-java</artifactId>
+            <version>${aws.encryption.sdk.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.bouncycastle</groupId>
+                    <artifactId>bcprov-ext-jdk15on</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.bouncycastle</groupId>
+            <artifactId>bcprov-ext-jdk18on</artifactId>
+            <version>${bouncycastle.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>joda-time</groupId>
+            <artifactId>joda-time</artifactId>
+            <version>2.13.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>${httpclient.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>commons-codec</groupId>
+            <artifactId>commons-codec</artifactId>
+            <version>${commons.codec.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>javax.xml.bind</groupId>
+            <artifactId>jaxb-api</artifactId>
+            <version>2.3.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>${mockito.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-beans</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j2-impl</artifactId>
+            <version>2.24.3</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.felix</groupId>
+                <artifactId>maven-bundle-plugin</artifactId>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+                <configuration>
+                    <skip>false</skip>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>copy-dependencies</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>${project.build.directory}/libs</outputDirectory>
+                            <includeScope>compile</includeScope>
+                            <excludeArtifactIds>ignite-core,ignite-spring,ignite-shmem</excludeArtifactIds>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/aws2/S3Utils.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/aws2/S3Utils.java
new file mode 100644
index 000000000..44b04c1a9
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/aws2/S3Utils.java
@@ -0,0 +1,23 @@
+package org.apache.ignite.spi.aws2;
+
+
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.HeadBucketRequest;
+import software.amazon.awssdk.services.s3.model.S3Exception;
+
+/**
+ * Helper methods for working with S3 buckets through the AWS SDK v2 {@link S3Client}.
+ */
+public class S3Utils {
+ /**
+ * Checks whether the given bucket exists and is accessible to the caller.
+ *
+ * @param s3 S3 client.
+ * @param bucketName Bucket name.
+ * @return {@code true} if the bucket exists, {@code false} otherwise.
+ */
+ public static boolean doesBucketExist(S3Client s3, String bucketName) {
+ try {
+ s3.headBucket(HeadBucketRequest.builder().bucket(bucketName).build());
+ return true;
+ } catch (S3Exception e) {
+ // Per https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html the bucket is
+ // treated as non-existent if a 404, 400 or 403 status code is returned.
+ if (e.statusCode() == 404 || e.statusCode() == 400 || e.statusCode() == 403) {
+ return false;
+ }
+ throw e; // Re-throw other exceptions
+ }
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointData.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointData.java
new file mode 100644
index 000000000..22801946f
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointData.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Wrapper for a checkpoint that is saved to S3. It extends every checkpoint
+ * with an expiration time and the key under which the checkpoint is stored.
+ *
+ * The key is used by {@link S3CheckpointSpi} to look up and expire checkpoints.
+ */
+class S3CheckpointData {
+ /** Checkpoint data. */
+ private final byte[] state;
+
+ /** Checkpoint expiration time. */
+ private final long expTime;
+
+ /** Checkpoint key. */
+ private final String key;
+
+ /**
+ * Creates new instance of checkpoint data wrapper.
+ *
+ * @param state Checkpoint data.
+ * @param expTime Checkpoint expiration time in milliseconds.
+ * @param key Key of checkpoint.
+ */
+ S3CheckpointData(byte[] state, long expTime, String key) {
+ assert expTime >= 0;
+
+ this.state = state;
+ this.expTime = expTime;
+ this.key = key;
+ }
+
+ /**
+ * Gets checkpoint data.
+ *
+ * @return Checkpoint data.
+ */
+ byte[] getState() {
+ return state;
+ }
+
+ /**
+ * Gets checkpoint expiration time.
+ *
+ * @return Expire time in milliseconds.
+ */
+ long getExpireTime() {
+ return expTime;
+ }
+
+ /**
+ * Gets key of checkpoint.
+ *
+ * @return Key of checkpoint.
+ */
+ public String getKey() {
+ return key;
+ }
+
+ /**
+ * @return Serialized checkpoint data.
+ */
+ public byte[] toBytes() {
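+ // Layout: [4-byte state length][state bytes][8-byte expire time][4-byte key length][key bytes].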
+ byte[] keyBytes = key.getBytes();
+
+ byte[] bytes = new byte[4 + state.length + 8 + 4 + keyBytes.length];
+
+ U.intToBytes(state.length, bytes, 0);
+ U.arrayCopy(state, 0, bytes, 4, state.length);
+ U.longToBytes(expTime, bytes, 4 + state.length);
+ U.intToBytes(keyBytes.length, bytes, 4 + state.length + 8);
+ U.arrayCopy(keyBytes, 0, bytes, 4 + state.length + 8 + 4, keyBytes.length);
+
+ return bytes;
+ }
+
+ /**
+ * @param in Input stream.
+ * @return Checkpoint data.
+ * @throws IOException In case of error.
+ */
+ public static S3CheckpointData fromStream(InputStream in) throws IOException {
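+ // Reads the layout produced by toBytes(): state length, state bytes, expire time, key length, key bytes.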
+ byte[] buf = new byte[8];
+
+ read(in, buf, 4);
+
+ byte[] state = new byte[U.bytesToInt(buf, 0)];
+
+ read(in, state, state.length);
+
+ read(in, buf, 8);
+
+ long expTime = U.bytesToLong(buf, 0);
+
+ read(in, buf, 4);
+
+ byte[] keyBytes = new byte[U.bytesToInt(buf, 0)];
+
+ read(in, keyBytes, keyBytes.length);
+
+ return new S3CheckpointData(state, expTime, new String(keyBytes));
+ }
+
+ /**
+ * @param in Input stream.
+ * @param buf Buffer.
+ * @param len Number of bytes to read.
+ * @throws IOException In case of error.
+ */
+ private static void read(InputStream in, byte[] buf, int len) throws IOException {
+ int cnt = in.read(buf, 0, len);
+
+ if (cnt < len)
+ throw new IOException("End of stream reached.");
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(S3CheckpointData.class, this);
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpi.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpi.java
new file mode 100644
index 000000000..efd4c5008
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpi.java
@@ -0,0 +1,849 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.internal.util.tostring.GridToStringExclude;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.resources.IgniteInstanceResource;
+import org.apache.ignite.resources.LoggerResource;
+import org.apache.ignite.spi.*;
+import org.apache.ignite.spi.checkpoint.CheckpointListener;
+import org.apache.ignite.spi.checkpoint.CheckpointSpi;
+import org.jetbrains.annotations.Nullable;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.awscore.exception.AwsServiceException;
+import software.amazon.awssdk.core.ResponseInputStream;
+import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
+import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.*;
+import software.amazon.awssdk.services.s3.paginators.ListObjectsV2Iterable;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.util.*;
+
+/**
+ * This class defines Amazon S3-based implementation for checkpoint SPI.
+ *
+ * For information about Amazon S3 visit aws.amazon.com.
+ *
+ *
+ *
+ * This SPI has one mandatory configuration parameter:
+ *
+ * - AWS credentials (see {@link #setAwsCredentials(AwsCredentials)})
+ *
+ *
+ * This SPI has following optional configuration parameters:
+ *
+ * - Bucket name suffix (see {@link #setBucketNameSuffix(String)})
+ * - Client configuration (see {@link #setClientConfiguration(ClientOverrideConfiguration)})
+ * - Bucket endpoint (see {@link #setBucketEndpoint(String)})
+ * - Server side encryption algorithm (see {@link #setSSEAlgorithm(String)})
+ *
+ *
+ * {@link S3CheckpointSpi} can be configured as follows:
+ *
+ * IgniteConfiguration cfg = new IgniteConfiguration();
+ *
+ * S3CheckpointSpi spi = new S3CheckpointSpi();
+ *
+ * AwsCredentials cred = AwsBasicCredentials.create(YOUR_ACCESS_KEY_ID, YOUR_SECRET_ACCESS_KEY);
+ *
+ * spi.setAwsCredentials(cred);
+ *
+ * spi.setBucketNameSuffix("checkpoints");
+ *
+ * // Override default checkpoint SPI.
+ * cfg.setCheckpointSpi(spi);
+ *
+ * // Start grid.
+ * G.start(cfg);
+ *
+ *
+ * {@link S3CheckpointSpi} can be configured from Spring XML configuration file:
+ *
+ * <bean id="grid.custom.cfg" class="org.apache.ignite.configuration.IgniteConfiguration" singleton="true">
+ * ...
+ * <property name="checkpointSpi">
+ * <bean class="org.apache.ignite.spi.checkpoint.s3_v2.S3CheckpointSpi">
+ * <property name="awsCredentials">
+ * <bean class="software.amazon.awssdk.auth.credentials.AwsBasicCredentials" factory-method="create">
+ * <constructor-arg value="YOUR_ACCESS_KEY_ID" />
+ * <constructor-arg value="YOUR_SECRET_ACCESS_KEY" />
+ * </bean>
+ * </property>
+ * </bean>
+ * </property>
+ * ...
+ * </bean>
+ *
+ *
+ * Note that storing data in AWS S3 service will result in charges to your AWS account.
+ * Choose another implementation of {@link org.apache.ignite.spi.checkpoint.CheckpointSpi} for local or
+ * home network tests.
+ *
+ *
+ *
+ * For information about Spring framework visit www.springframework.org
+ * @see org.apache.ignite.spi.checkpoint.CheckpointSpi
+ */
+@IgniteSpiMultipleInstancesSupport(true)
+public class S3CheckpointSpi extends IgniteSpiAdapter implements CheckpointSpi {
+ /** Logger. */
+ @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
+ @LoggerResource
+ private IgniteLogger log;
+
+ /** Ignite instance. */
+ @IgniteInstanceResource
+ private Ignite ignite;
+
+ /** Task that takes care of outdated checkpoint files. */
+ private S3TimeoutWorker timeoutWrk;
+
+ /** Listener. */
+ private CheckpointListener lsnr;
+
+ /** Prefix to use in bucket name generation. */
+ public static final String BUCKET_NAME_PREFIX = "ignite-checkpoint-";
+
+ /** Suffix to use in bucket name generation. */
+ public static final String DFLT_BUCKET_NAME_SUFFIX = "default-bucket";
+
+ /** Client to interact with S3 storage. */
+ @GridToStringExclude
+ private S3Client s3;
+
+ /** Bucket name suffix (set by user). */
+ private String bucketNameSuffix;
+
+ /** Bucket name (generated). */
+ private String bucketName;
+
+ /**
+ * AWS region.
+ */
+ private String awsRegion = "us-east-1";
+
+ /** Bucket endpoint (set by user). */
+ @Nullable private String bucketEndpoint;
+
+ /** Server side encryption algorithm */
+ @Nullable private String sseAlg;
+
+ /** Amazon client configuration. */
+ private ClientOverrideConfiguration cfgOverride;
+
+ /**
+ * AWS Credentials.
+ */
+ @GridToStringExclude
+ private AwsCredentials cred;
+
+ /**
+ * AWS credentials provider.
+ */
+ @GridToStringExclude
+ private AwsCredentialsProvider credProvider;
+
+ /** Mutex. */
+ private final Object mux = new Object();
+
+ /**
+ * Gets S3 bucket name to use.
+ *
+ * @return S3 bucket name to use.
+ */
+ public String getBucketName() {
+ return bucketName;
+ }
+
+ /**
+ * Gets S3 bucket endpoint to use.
+ *
+ * @return S3 bucket endpoint to use.
+ */
+ @Nullable public String getBucketEndpoint() {
+ return bucketEndpoint;
+ }
+
+ /**
+ * Gets S3 server-side encryption algorithm.
+ *
+ * @return S3 server-side encryption algorithm to use.
+ */
+ @Nullable public String getSSEAlgorithm() {
+ return sseAlg;
+ }
+
+ /**
+ * Gets S3 access key.
+ *
+ * @return S3 access key.
+ */
+ public String getAccessKey() {
+ return cred.accessKeyId();
+ }
+
+ /**
+ * Gets S3 secret key.
+ *
+ * @return S3 secret key.
+ */
+ public String getSecretAccessKey() {
+ return cred.secretAccessKey();
+ }
+
+ /**
+ * Gets AWS region used by the S3 client.
+ *
+ * @return AWS region.
+ */
+ public String getAwsRegion() {
+ return awsRegion;
+ }
+
+ /**
+ * Sets AWS region for the S3 client. Defaults to {@code us-east-1}.
+ *
+ * @param awsRegion AWS region.
+ * @return {@code this} for chaining.
+ */
+ public S3CheckpointSpi setAwsRegion(String awsRegion) {
+ this.awsRegion = awsRegion;
+ return this;
+ }
+
+ /**
+ * Sets bucket name suffix.
+ *
+ * @param bucketNameSuffix Bucket name suffix.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public S3CheckpointSpi setBucketNameSuffix(String bucketNameSuffix) {
+ this.bucketNameSuffix = bucketNameSuffix;
+
+ return this;
+ }
+
+ /**
+ * Sets bucket endpoint.
+ * If the endpoint is not set then S3CheckpointSpi will go to each region to find a corresponding bucket.
+ * For information about possible endpoint names visit
+ * docs.aws.amazon.com
+ *
+ * @param bucketEndpoint Bucket endpoint, for example, {@code s3.us-east-2.amazonaws.com}.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public S3CheckpointSpi setBucketEndpoint(String bucketEndpoint) {
+ this.bucketEndpoint = bucketEndpoint;
+
+ return this;
+ }
+
+ /**
+ * Sets server-side encryption algorithm for Amazon S3-managed encryption keys.
+ * For information about possible S3-managed encryption keys visit
+ * docs.aws.amazon.com.
+ *
+ * @param sseAlg Server-side encryption algorithm, for example, {@code AES256} or {@code aws:kms}.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public S3CheckpointSpi setSSEAlgorithm(String sseAlg) {
+ this.sseAlg = sseAlg;
+
+ return this;
+ }
+
+ /**
+ * Sets Amazon client configuration.
+ *
+ * For details refer to Amazon S3 API reference.
+ *
+ * @param cfg Amazon client configuration.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public S3CheckpointSpi setClientConfiguration(ClientOverrideConfiguration cfg) {
+ this.cfgOverride = cfg;
+
+ return this;
+ }
+
+ /**
+ * Sets AWS credentials.
+ *
+ * For details refer to Amazon S3 API reference.
+ *
+ * @param cred AWS credentials.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = false)
+ public S3CheckpointSpi setAwsCredentials(AwsCredentials cred) {
+ this.cred = cred;
+
+ return this;
+ }
+
+ /**
+ * Instantiates an {@code S3Client} instance.
+ *
+ * @return Client instance to use to connect to AWS.
+ */
+ S3Client createAmazonS3Client() {
+ S3ClientBuilder builder = S3Client.builder();
+
+ // Set credentials
+ if (cred != null) {
+ builder.credentialsProvider(StaticCredentialsProvider.create(cred));
+ } else if (credProvider != null) {
+ builder.credentialsProvider(credProvider);
+ }
+ if (cfgOverride != null) {
+ builder.overrideConfiguration(cfgOverride);
+ }
+
+ if (!F.isEmpty(bucketEndpoint)) {
+ builder.endpointOverride(URI.create(bucketEndpoint));
+ }
+ builder.region(Region.of(awsRegion));
+
+ return builder.build();
+ }
+
+ private boolean doesBucketExist(String bucketName) {
+ try {
+ s3.headBucket(HeadBucketRequest.builder().bucket(bucketName).build());
+ return true;
+ } catch (S3Exception e) {
+ // Bucket doesn't exist if a 404 status code is returned
+ if (e.statusCode() == 404) {
+ return false;
+ }
+ throw e; // Re-throw other exceptions
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public void spiStart(String igniteInstanceName) throws IgniteSpiException {
+ // Start SPI start stopwatch.
+ startStopwatch();
+
+ assertParameter(cred != null, "awsCredentials != null");
+
+ if (log.isDebugEnabled()) {
+ log.debug(configInfo("awsCredentials", cred));
+ log.debug(configInfo("clientConfiguration", cfgOverride));
+ log.debug(configInfo("bucketNameSuffix", bucketNameSuffix));
+ log.debug(configInfo("bucketEndpoint", bucketEndpoint));
+ log.debug(configInfo("SSEAlgorithm", sseAlg));
+ }
+
+ if (cfgOverride == null)
+ U.warn(log, "Amazon client configuration is not set (will use default).");
+
+ if (F.isEmpty(bucketNameSuffix)) {
+ U.warn(log, "Bucket name suffix is null or empty (will use default bucket name).");
+
+ bucketName = BUCKET_NAME_PREFIX + DFLT_BUCKET_NAME_SUFFIX;
+ }
+ else
+ bucketName = BUCKET_NAME_PREFIX + bucketNameSuffix;
+
+ s3 = createAmazonS3Client();
+
+
+ if (!doesBucketExist(bucketName)) {
+ try {
+ s3.createBucket(CreateBucketRequest.builder()
+ .bucket(bucketName)
+ .build());
+
+ if (log.isDebugEnabled())
+ log.debug("Created S3 bucket: " + bucketName);
+
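+ // Poll until the newly created bucket is reported as existing before registering checkpoints.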
+ while (!doesBucketExist(bucketName))
+ try {
+ U.sleep(200);
+ }
+ catch (IgniteInterruptedCheckedException e) {
+ throw new IgniteSpiException("Thread has been interrupted.", e);
+ }
+ }
+ catch (SdkClientException e) {
+ try {
+ if (!doesBucketExist(bucketName))
+ throw new IgniteSpiException("Failed to create bucket: " + bucketName, e);
+ }
+ catch (SdkClientException ignored) {
+ throw new IgniteSpiException("Failed to create bucket: " + bucketName, e);
+ }
+ }
+ }
+
+ Collection<S3TimeData> s3TimeDataLst = new LinkedList<>();
+
+ try {
+ ListObjectsV2Request request = ListObjectsV2Request.builder()
+ .bucket(bucketName)
+ .build();
+ ListObjectsV2Iterable responsePages = s3.listObjectsV2Paginator(request);
+
+ // Process each page of results
+ for (ListObjectsV2Response page : responsePages) {
+ for (S3Object s3Object : page.contents()) {
+ S3CheckpointData data = read(s3Object.key());
+
+ if (data != null) {
+ s3TimeDataLst.add(new S3TimeData(data.getExpireTime(), data.getKey()));
+
+ if (log.isDebugEnabled()) {
+ log.debug("Registered existing checkpoint from key: " + data.getKey());
+ }
+ }
+ }
+ }
+ }
+ catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to read checkpoint bucket: " + bucketName, e);
+ }
+ catch (IgniteCheckedException e) {
+ throw new IgniteSpiException("Failed to marshal/unmarshal objects in bucket: " + bucketName, e);
+ }
+
+ // Track expiration for only those data that are made by this node
+ timeoutWrk = new S3TimeoutWorker();
+
+ timeoutWrk.add(s3TimeDataLst);
+
+ timeoutWrk.start();
+
+ registerMBean(igniteInstanceName, new S3CheckpointSpiMBeanImpl(this), S3CheckpointSpiMBean.class);
+
+ // Ack ok start.
+ if (log.isDebugEnabled())
+ log.debug(startInfo());
+ }
+
+ /** {@inheritDoc} */
+ @Override public void spiStop() throws IgniteSpiException {
+ if (timeoutWrk != null) {
+ IgniteUtils.interrupt(timeoutWrk);
+ IgniteUtils.join(timeoutWrk, log);
+ }
+
+ unregisterMBean();
+
+ // Ack ok stop.
+ if (log.isDebugEnabled())
+ log.debug(stopInfo());
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] loadCheckpoint(String key) throws IgniteSpiException {
+ assert !F.isEmpty(key);
+
+ try {
+ S3CheckpointData data = read(key);
+
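+ // Return the state only if the checkpoint exists and has not expired (expire time 0 means no expiration).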
+ return data != null ?
+ data.getExpireTime() == 0 || data.getExpireTime() > U.currentTimeMillis() ?
+ data.getState() :
+ null :
+ null;
+ }
+ catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to read checkpoint key: " + key, e);
+ }
+ catch (IgniteCheckedException e) {
+ throw new IgniteSpiException("Failed to marshal/unmarshal objects in checkpoint key: " + key, e);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean saveCheckpoint(String key, byte[] state, long timeout, boolean overwrite)
+ throws IgniteSpiException {
+ assert !F.isEmpty(key);
+
+ long expireTime = 0;
+
+ if (timeout > 0) {
+ expireTime = U.currentTimeMillis() + timeout;
+
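+ // Guard against long overflow for very large timeouts.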
+ if (expireTime < 0)
+ expireTime = Long.MAX_VALUE;
+ }
+
+ try {
+ if (hasKey(key)) {
+ if (!overwrite)
+ return false;
+
+ if (log.isDebugEnabled())
+ log.debug("Overriding existing key: " + key);
+ }
+
+ S3CheckpointData data = new S3CheckpointData(state, expireTime, key);
+
+ write(data);
+ }
+ catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to write checkpoint data [key=" + key + ", state=" +
+ Arrays.toString(state) + ']', e);
+ }
+ catch (IgniteCheckedException e) {
+ throw new IgniteSpiException("Failed to marshal checkpoint data [key=" + key + ", state=" +
+ Arrays.toString(state) + ']', e);
+ }
+
+ if (timeout > 0)
+ timeoutWrk.add(new S3TimeData(expireTime, key));
+
+ return true;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean removeCheckpoint(String key) {
+ assert !F.isEmpty(key);
+
+ timeoutWrk.remove(key);
+
+ boolean rmv = false;
+
+ try {
+ rmv = delete(key);
+ }
+ catch (SdkClientException e) {
+ U.error(log, "Failed to delete data by key: " + key, e);
+ }
+
+ if (rmv) {
+ CheckpointListener tmpLsnr = lsnr;
+
+ if (tmpLsnr != null)
+ tmpLsnr.onCheckpointRemoved(key);
+ }
+
+ return rmv;
+ }
+
+ /**
+ * Reads checkpoint data.
+ *
+ * @param key Key name to read data from.
+ * @return Checkpoint data object.
+ * @throws IgniteCheckedException Thrown if an error occurs while unmarshalling.
+ * @throws SdkClientException If an error occurs while querying Amazon S3.
+ */
+ @Nullable private S3CheckpointData read(String key) throws IgniteCheckedException, SdkClientException {
+ assert !F.isEmpty(key);
+
+ if (log.isDebugEnabled())
+ log.debug("Reading data from S3 [bucket=" + bucketName + ", key=" + key + ']');
+
+ try {
+ GetObjectRequest getObjectRequest = GetObjectRequest.builder()
+ .bucket(bucketName)
+ .key(key)
+ .build();
+ ResponseInputStream<GetObjectResponse> objectResponse = s3.getObject(getObjectRequest);
+ InputStream in = objectResponse;
+ try {
+ return S3CheckpointData.fromStream(in);
+ }
+ catch (IOException e) {
+ throw new IgniteCheckedException("Failed to unmarshal S3CheckpointData [bucketName=" +
+ bucketName + ", key=" + key + ']', e);
+ }
+ finally {
+ U.closeQuiet(in);
+ }
+ }
+ catch (AwsServiceException e) {
+ if (e.statusCode() != 404)
+ throw e;
+ }
+ return null;
+ }
+
+ /**
+ * Writes given checkpoint data to a given S3 bucket. Data is serialized to
+ * the binary stream and saved to the S3.
+ *
+ * @param data Checkpoint data.
+ * @throws IgniteCheckedException Thrown if an error occurs while marshalling.
+ * @throws SdkClientException If an error occurs while querying Amazon S3.
+ */
+ private void write(S3CheckpointData data) throws IgniteCheckedException, SdkClientException {
+ assert data != null;
+
+ if (log.isDebugEnabled())
+ log.debug("Writing data to S3 [bucket=" + bucketName + ", key=" + data.getKey() + ']');
+
+ byte[] buf = data.toBytes();
+
+ PutObjectRequest.Builder requestBuilder = PutObjectRequest.builder()
+ .bucket(bucketName)
+ .key(data.getKey())
+ .contentLength((long) buf.length); // Set content length
+
+ // Add server-side encryption algorithm if provided
+ if (sseAlg != null && !sseAlg.isEmpty()) {
+ requestBuilder.serverSideEncryption(sseAlg);
+ }
+
+ PutObjectRequest request = requestBuilder.build();
+
+ // Upload the object
+ s3.putObject(request, RequestBody.fromBytes(buf));
+ }
+
+ /**
+ * Deletes checkpoint data.
+ *
+ * @param key Key of the data in storage.
+ * @return {@code True} if operations succeeds and data is actually removed.
+ * @throws SdkClientException If an error occurs while querying Amazon S3.
+ */
+ private boolean delete(String key) throws SdkClientException {
+ assert !F.isEmpty(key);
+
+ if (log.isDebugEnabled())
+ log.debug("Removing data from S3 [bucket=" + bucketName + ", key=" + key + ']');
+
+ if (!hasKey(key))
+ return false;
+
+ s3.deleteObject( dr -> dr.bucket(bucketName).key(key));
+
+ return true;
+ }
+
+ /**
+ * Returns {@code true} if mapping presents for the provided key.
+ *
+ * @param key Key to check mapping for.
+ * @return {@code true} if mapping presents for key.
+ * @throws SdkClientException If an error occurs while querying Amazon S3.
+ */
+ boolean hasKey(String key) throws SdkClientException {
+ assert !F.isEmpty(key);
+
+ try {
+ HeadObjectRequest headObjectRequest = HeadObjectRequest.builder()
+ .bucket(bucketName)
+ .key(key)
+ .build();
+
+ // Get the metadata and check the content length
+ HeadObjectResponse response = s3.headObject(headObjectRequest);
+ return response.contentLength() != 0;
+ }
+ catch (AwsServiceException e) {
+ if (e.statusCode() != 404)
+ throw e;
+ }
+
+ return false;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setCheckpointListener(CheckpointListener lsnr) {
+ this.lsnr = lsnr;
+ }
+
+ /** {@inheritDoc} */
+ @Override public S3CheckpointSpi setName(String name) {
+ super.setName(name);
+
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(S3CheckpointSpi.class, this);
+ }
+
+ /**
+ * Implementation of {@link org.apache.ignite.spi.IgniteSpiThread} that takes care of outdated S3 data.
+ * Every checkpoint has expiration date after which it makes no sense to
+ * keep it. This worker periodically cleans S3 bucket according to checkpoints
+ * expiration time.
+ */
+ private class S3TimeoutWorker extends IgniteSpiThread {
+ /** List of data with access and expiration date. */
+ private Map<String, S3TimeData> map = new HashMap<>();
+
+ /**
+ * Constructor.
+ */
+ S3TimeoutWorker() {
+ super(ignite.name(), "grid-s3-checkpoint-worker", log);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void body() throws InterruptedException {
+ long nextTime = 0;
+
+ Collection<String> rmvKeys = new HashSet<>();
+
+ while (!isInterrupted()) {
+ rmvKeys.clear();
+
+ synchronized (mux) {
+ long delay = U.currentTimeMillis() - nextTime;
+
+ if (nextTime != 0 && delay > 0)
+ mux.wait(delay);
+
+ long now = U.currentTimeMillis();
+
+ nextTime = -1;
+
+ // Check entries one by one and physically remove those whose expiration time has passed.
+ for (Iterator<Map.Entry<String, S3TimeData>> iter = map.entrySet().iterator(); iter.hasNext();) {
+ Map.Entry<String, S3TimeData> entry = iter.next();
+
+ String key = entry.getKey();
+
+ S3TimeData timeData = entry.getValue();
+
+ if (timeData.getExpireTime() > 0)
+ if (timeData.getExpireTime() <= now) {
+ try {
+ delete(key);
+
+ if (log.isDebugEnabled())
+ log.debug("Data was deleted by timeout: " + key);
+ }
+ catch (SdkClientException e) {
+ U.error(log, "Failed to delete data by key: " + key, e);
+ }
+
+ iter.remove();
+
+ rmvKeys.add(timeData.getKey());
+ }
+ else if (timeData.getExpireTime() < nextTime || nextTime == -1)
+ nextTime = timeData.getExpireTime();
+ }
+ }
+
+ CheckpointListener tmpLsnr = lsnr;
+
+ if (tmpLsnr != null)
+ for (String key : rmvKeys)
+ tmpLsnr.onCheckpointRemoved(key);
+ }
+
+ synchronized (mux) {
+ map.clear();
+ }
+ }
+
+ /**
+ * Adds data to a list of files this task should look after.
+ *
+ * @param timeData File expiration and access information.
+ */
+ void add(S3TimeData timeData) {
+ assert timeData != null;
+
+ synchronized (mux) {
+ map.put(timeData.getKey(), timeData);
+
+ mux.notifyAll();
+ }
+ }
+
+ /**
+ * Adds list of data this task should look after.
+ *
+ * @param newData List of data.
+ */
+ void add(Iterable<S3TimeData> newData) {
+ assert newData != null;
+
+ synchronized (mux) {
+ for (S3TimeData data : newData)
+ map.put(data.getKey(), data);
+
+ mux.notifyAll();
+ }
+ }
+
+ /**
+ * Removes data.
+ *
+ * @param key Checkpoint key.
+ */
+ public void remove(String key) {
+ assert key != null;
+
+ synchronized (mux) {
+ map.remove(key);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(S3TimeoutWorker.class, this);
+ }
+ }
+
+ /**
+ * MBean implementation for S3CheckpointSpi.
+ */
+ private class S3CheckpointSpiMBeanImpl extends IgniteSpiMBeanAdapter implements S3CheckpointSpiMBean {
+ /**
+ * Constructor.
+ *
+ * @param spiAdapter SPI adapter.
+ */
+ S3CheckpointSpiMBeanImpl(IgniteSpiAdapter spiAdapter) {
+ super(spiAdapter);
+ }
+
+ /** {@inheritDoc} */
+ @Override public String getBucketName() {
+ return S3CheckpointSpi.this.getBucketName();
+ }
+
+ /** {@inheritDoc} */
+ @Override public String getBucketEndpoint() {
+ return S3CheckpointSpi.this.getBucketEndpoint();
+ }
+
+ /** {@inheritDoc} */
+ @Override public String getSSEAlgorithm() {
+ return S3CheckpointSpi.this.getSSEAlgorithm();
+ }
+
+ /** {@inheritDoc} */
+ @Override public String getAccessKey() {
+ return S3CheckpointSpi.this.getAccessKey();
+ }
+
+
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiMBean.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiMBean.java
new file mode 100644
index 000000000..396a6d057
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiMBean.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.mxbean.MXBeanDescription;
+import org.apache.ignite.spi.IgniteSpiManagementMBean;
+
+/**
+ * Management bean for {@link S3CheckpointSpi}.
+ */
+@MXBeanDescription("MBean that provides access to S3 checkpoint SPI configuration.")
+public interface S3CheckpointSpiMBean extends IgniteSpiManagementMBean {
+ /**
+ * Gets S3 bucket name to use.
+ *
+ * @return S3 bucket name to use.
+ */
+ @MXBeanDescription("S3 bucket name.")
+ public String getBucketName();
+
+ /**
+ * @return S3 bucket endpoint.
+ */
+ @MXBeanDescription("S3 bucket endpoint.")
+ public String getBucketEndpoint();
+
+ /**
+ * @return S3 server-side encryption algorithm.
+ */
+ @MXBeanDescription("S3 server-side encryption algorithm.")
+ public String getSSEAlgorithm();
+
+ /**
+ * @return S3 access key.
+ */
+ @MXBeanDescription("S3 access key.")
+ public String getAccessKey();
+
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3TimeData.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3TimeData.java
new file mode 100644
index 000000000..7378b65bc
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/S3TimeData.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Helper class that keeps checkpoint expiration date inside to track and delete
+ * obsolete files.
+ */
+class S3TimeData {
+ /** Checkpoint expiration date. */
+ private long expTime;
+
+ /** Key of checkpoint. */
+ private String key;
+
+ /**
+ * Creates new instance of checkpoint time information.
+ *
+ * @param expTime Checkpoint expiration time.
+ * @param key Key of checkpoint.
+ */
+ S3TimeData(long expTime, String key) {
+ assert expTime >= 0;
+
+ this.expTime = expTime;
+ this.key = key;
+ }
+
+ /**
+ * Gets checkpoint expiration time.
+ *
+ * @return Expire time.
+ */
+ long getExpireTime() {
+ return expTime;
+ }
+
+ /**
+ * Sets checkpoint expiration time.
+ *
+ * @param expTime Checkpoint time-to-live value.
+ */
+ void setExpireTime(long expTime) {
+ assert expTime >= 0;
+
+ this.expTime = expTime;
+ }
+
+ /**
+ * Gets checkpoint key.
+ *
+ * @return Checkpoint key.
+ */
+ String getKey() {
+ return key;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(S3TimeData.class, this);
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/package-info.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/package-info.java
new file mode 100644
index 000000000..f0edfc094
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3_v2/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *
+ * Contains S3-based implementation for checkpoint SPI.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/IgniteS3Client.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/IgniteS3Client.java
new file mode 100644
index 000000000..95c3d090c
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/IgniteS3Client.java
@@ -0,0 +1,142 @@
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+
+import org.apache.ignite.internal.util.tostring.GridToStringExclude;
+import org.apache.ignite.internal.util.typedef.F;
+import org.jetbrains.annotations.Nullable;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+
+import java.net.URI;
+
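+/**
+ * Mutable holder for the AWS SDK v2 S3 client settings (credentials, credentials provider, region,
+ * bucket name, endpoint and client override configuration) used by the S3-based SPIs in this module
+ * to build an {@link S3Client}.
+ */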
+public class IgniteS3Client {
+
+
+ /** Client to interact with S3 storage. */
+ @GridToStringExclude
+ private S3Client s3;
+
+ /** Bucket name. */
+ String bucketName;
+
+ /** Bucket endpoint. */
+ @Nullable
+ private String bucketEndpoint;
+
+ /** Server side encryption algorithm. */
+ @Nullable String sseAlg;
+
+ public S3Client getS3() {
+ return s3;
+ }
+
+
+ public String getBucketName() {
+ return bucketName;
+ }
+
+ public IgniteS3Client setBucketName(String bucketName) {
+ this.bucketName = bucketName;
+ return this;
+ }
+
+ public @Nullable String getBucketEndpoint() {
+ return bucketEndpoint;
+ }
+
+ public IgniteS3Client setBucketEndpoint(@Nullable String bucketEndpoint) {
+ this.bucketEndpoint = bucketEndpoint;
+ return this;
+ }
+
+ public @Nullable String getSseAlg() {
+ return sseAlg;
+ }
+
+ public IgniteS3Client setSseAlg(@Nullable String sseAlg) {
+ this.sseAlg = sseAlg;
+ return this;
+ }
+
+ public ClientOverrideConfiguration getCfgOverride() {
+ return cfgOverride;
+ }
+
+ public IgniteS3Client setCfgOverride(ClientOverrideConfiguration cfgOverride) {
+ this.cfgOverride = cfgOverride;
+ return this;
+ }
+
+ public AwsCredentials getCred() {
+ return cred;
+ }
+
+ public IgniteS3Client setCred(AwsCredentials cred) {
+ this.cred = cred;
+ return this;
+ }
+
+ public AwsCredentialsProvider getCredProvider() {
+ return credProvider;
+ }
+
+ public IgniteS3Client setCredProvider(AwsCredentialsProvider credProvider) {
+ this.credProvider = credProvider;
+ return this;
+ }
+
+ public String getAwsRegion() {
+ return awsRegion;
+ }
+
+ public IgniteS3Client setAwsRegion(String awsRegion) {
+ this.awsRegion = awsRegion;
+ return this;
+ }
+
+ /** Amazon client configuration. */
+ private ClientOverrideConfiguration cfgOverride;
+
+ /** AWS Credentials. */
+ @GridToStringExclude
+ private AwsCredentials cred;
+
+ /** AWS credentials provider. */
+ @GridToStringExclude
+ private AwsCredentialsProvider credProvider;
+
+ /** AWS region. */
+ private String awsRegion = "us-east-1";
+
+
+ /**
+ * Instantiates an {@code S3Client} instance.
+ *
+ * @return Client instance to use to connect to AWS.
+ */
+ S3Client createAmazonS3Client() {
+ S3ClientBuilder builder = S3Client.builder();
+
+ // Set credentials
+ if (cred != null) {
+ builder.credentialsProvider(StaticCredentialsProvider.create(cred));
+ } else if (credProvider != null) {
+ builder.credentialsProvider(credProvider);
+ }
+ if (cfgOverride != null) {
+ builder.overrideConfiguration(cfgOverride);
+ }
+
+ if (!F.isEmpty(bucketEndpoint)) {
+ builder.endpointOverride(URI.create(bucketEndpoint));
+ }
+ builder.region(Region.of(awsRegion));
+
+ return builder.build();
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinder.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinder.java
new file mode 100644
index 000000000..b9c252dae
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinder.java
@@ -0,0 +1,566 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.commons.codec.binary.Base32;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.tostring.GridToStringExclude;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.SB;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.resources.LoggerResource;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.aws2.S3Utils;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt.EncryptionService;
+import org.jetbrains.annotations.Nullable;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
+import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.*;
+
+import java.io.ByteArrayInputStream;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.regex.Pattern;
+
+/**
+ * AWS S3-based IP finder.
+ *
+ * For information about Amazon S3 visit aws.amazon.com.
+ *
+ *
+ * This IP finder has the following mandatory configuration parameters:
+ *
+ * - AWS credentials (see {@link #setAwsCredentials(AwsCredentials)} and
+ * {@link #setAwsCredentialsProvider(AwsCredentialsProvider)})
+ * - Bucket name (see {@link #setBucketName(String)})
+ *
+ * The following configuration parameters are optional:
+ * - Client configuration (see {@link #setClientConfigurationOverride(ClientOverrideConfiguration)})
+ * - Shared flag (see {@link #setShared(boolean)})
+ * - Bucket endpoint (see {@link #setBucketEndpoint(String)})
+ * - Server side encryption algorithm (see {@link #setSSEAlgorithm(String)})
+ * - Key prefix for the node addresses (see {@link #setKeyPrefix(String)})
+ * - Client side encryption service (see {@link #setEncryptionService(EncryptionService)})
+ *
+ *
+ * The finder will create S3 bucket with configured name. The bucket will contain entries named like the following:
+ * {@code 192.168.1.136#1001}.
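+ *
+ * A minimal programmatic configuration sketch (the bucket name and key values below are placeholders, not defaults):
+ *
+ * TcpDiscoveryS3IpFinder ipFinder = new TcpDiscoveryS3IpFinder();
+ * ipFinder.setAwsCredentials(AwsBasicCredentials.create("YOUR_ACCESS_KEY_ID", "YOUR_SECRET_ACCESS_KEY"));
+ * ipFinder.setBucketName("your-ignite-discovery-bucket");
+ *
+ * TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();
+ * discoSpi.setIpFinder(ipFinder);
+ *
+ * IgniteConfiguration cfg = new IgniteConfiguration();
+ * cfg.setDiscoverySpi(discoSpi);
+ *
+ * // Start node.
+ * Ignition.start(cfg);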
+ *
+ * Note that storing data in AWS S3 service will result in charges to your AWS account. Choose another implementation of
+ * {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local or home network tests.
+ *
+ * Note that this finder is shared by default (see {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder#isShared()}).
+ */
+public class TcpDiscoveryS3IpFinder extends TcpDiscoveryIpFinderAdapter {
+ /**
+ * Delimiter to use in S3 entries name.
+ */
+ private static final String DELIM = "#";
+
+ /**
+ * Entry content.
+ */
+ private static final byte[] ENTRY_CONTENT = new byte[]{1};
+
+ /**
+ * Entry metadata.
+ */
+ @GridToStringExclude
+ private final Map<String, String> objMetadata = new HashMap<>();
+
+ /**
+ * Grid logger.
+ */
+ @LoggerResource
+ private IgniteLogger log;
+
+ /**
+ * Client to interact with S3 storage.
+ */
+ @GridToStringExclude
+ private S3Client s3;
+
+ /**
+ * Bucket name.
+ */
+ private String bucketName;
+
+ /**
+ * Bucket endpoint.
+ */
+ @Nullable
+ private String bucketEndpoint;
+
+ /**
+ * Server side encryption algorithm.
+ */
+ @Nullable
+ private String sseAlg;
+
+ /**
+ * Sub-folder name to write node addresses.
+ */
+ @Nullable
+ private String keyPrefix;
+
+ /**
+ * Encryption service.
+ **/
+ @Nullable
+ private EncryptionService encryptionSvc;
+
+ /**
+ * Init guard.
+ */
+ @GridToStringExclude
+ private final AtomicBoolean initGuard = new AtomicBoolean();
+
+ /**
+ * Init latch.
+ */
+ @GridToStringExclude
+ private final CountDownLatch initLatch = new CountDownLatch(1);
+
+ /**
+ * Amazon client configuration.
+ */
+ private ClientOverrideConfiguration cfgOverride;
+
+ /**
+ * AWS Credentials.
+ */
+ @GridToStringExclude
+ private AwsCredentialsProvider credProvider;
+
+ /**
+ * AWS region.
+ */
+ private String awsRegion = "us-east-1";
+
+ /**
+ * Constructor.
+ */
+ public TcpDiscoveryS3IpFinder() {
+ setShared(true);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
+ initClient();
+
+ Collection<InetSocketAddress> addrs = new LinkedList<>();
+
+ try {
+ String continuationToken = null;
+ do {
+ // Build the ListObjectsV2Request
+ ListObjectsV2Request.Builder requestBuilder = ListObjectsV2Request.builder()
+ .bucket(bucketName);
+ if (keyPrefix != null) {
+ requestBuilder.prefix(keyPrefix);
+ }
+ if (continuationToken != null) {
+ requestBuilder.continuationToken(continuationToken);
+ }
+
+ ListObjectsV2Request request = requestBuilder.build();
+ ListObjectsV2Response response = s3.listObjectsV2(request);
+
+ // Process the objects in the response
+ for (S3Object objectSummary : response.contents()) {
+ InetSocketAddress addr = addr(objectSummary);
+
+ if (addr != null) {
+ addrs.add(addr);
+ }
+ }
+
+ // Update the continuation token
+ continuationToken = response.nextContinuationToken();
+ } while (continuationToken != null);
+ } catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to list objects in the bucket: " + bucketName, e);
+ }
+
+ return addrs;
+ }
+
+ /**
+ * Parses the S3 key and returns the corresponding node address.
+ *
+ * @param sum S3 object summary.
+ * @return Node address, or {@code null} if the entry could not be parsed.
+ */
+ private InetSocketAddress addr(S3Object sum) {
+ String key = sum.key();
+ String addr = key;
+
+ if (keyPrefix != null)
+ addr = key.replaceFirst(Pattern.quote(keyPrefix), "");
+
+ if (encryptionSvc != null) {
+ byte[] encBytes = new Base32().decode(addr.getBytes(StandardCharsets.UTF_8));
+ byte[] decBytes = encryptionSvc.decrypt(encBytes);
+ addr = new String(decBytes, StandardCharsets.UTF_8).replaceAll("=", "");
+ }
+
+ StringTokenizer st = new StringTokenizer(addr, DELIM);
+
+ if (st.countTokens() != 2)
+ U.error(log, "Failed to parse S3 entry due to invalid format: " + addr);
+ else {
+ String addrStr = st.nextToken();
+ String portStr = st.nextToken();
+
+ int port = -1;
+
+ try {
+ port = Integer.parseInt(portStr);
+ } catch (NumberFormatException e) {
+ U.error(log, "Failed to parse port for S3 entry: " + addr, e);
+ }
+
+ if (port != -1)
+ try {
+ return new InetSocketAddress(addrStr, port);
+ } catch (IllegalArgumentException e) {
+ U.error(log, "Failed to parse port for S3 entry: " + addr, e);
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+ assert !F.isEmpty(addrs);
+
+ initClient();
+
+ for (InetSocketAddress addr : addrs) {
+ String key = key(addr);
+
+ try {
+
+ PutObjectRequest.Builder metaBuilder = PutObjectRequest.builder()
+ .bucket(bucketName)
+ .key(key)
+ .metadata(objMetadata);
+ if (!F.isEmpty(sseAlg)) {
+ metaBuilder.serverSideEncryption(sseAlg);
+ }
+
+ PutObjectRequest putObjectRequest = metaBuilder
+ .build();
+
+ s3.putObject(putObjectRequest, RequestBody.fromInputStream(new ByteArrayInputStream(ENTRY_CONTENT), ENTRY_CONTENT.length));
+
+ } catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to put entry [bucketName=" + bucketName +
+ ", entry=" + key + ']', e);
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+ assert !F.isEmpty(addrs);
+
+ initClient();
+
+ for (InetSocketAddress addr : addrs) {
+ String key = key(addr);
+
+ try {
+ s3.deleteObject(c -> {
+ c.bucket(bucketName);
+ c.key(key);
+ });
+ } catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to delete entry [bucketName=" + bucketName +
+ ", entry=" + key + ']', e);
+ }
+ }
+ }
+
+ /**
+ * Gets S3 key for provided address.
+ *
+ * @param addr Node address.
+ * @return Key.
+ */
+ private String key(InetSocketAddress addr) {
+ assert addr != null;
+
+ SB sb = new SB();
+
+ if (keyPrefix != null)
+ sb.a(keyPrefix);
+
+ String addrStr = addr.getAddress().getHostAddress();
+
+ if (encryptionSvc != null) {
+ String addrPort = new SB()
+ .a(addrStr)
+ .a(DELIM)
+ .a(addr.getPort()).toString();
+
+ byte[] encBytes = encryptionSvc.encrypt(addrPort.getBytes(StandardCharsets.UTF_8));
+ byte[] base32Bytes = new Base32().encode(encBytes);
+ String encStr = new String(base32Bytes, StandardCharsets.UTF_8).replaceAll("=", "");
+
+ sb.a(encStr);
+ } else
+ sb.a(addrStr)
+ .a(DELIM)
+ .a(addr.getPort());
+
+ return sb.toString();
+ }
+
+ /**
+ * Checks whether the given S3 bucket exists.
+ *
+ * @param bucketName Bucket name.
+ * @return {@code true} if the bucket exists, {@code false} otherwise.
+ */
+ private boolean doesBucketExist(String bucketName) {
+ return S3Utils.doesBucketExist(s3, bucketName);
+ }
+
+ /**
+ * Amazon S3 client initialization.
+ *
+ * @throws org.apache.ignite.spi.IgniteSpiException In case of error.
+ */
+ private void initClient() throws IgniteSpiException {
+ if (initGuard.compareAndSet(false, true))
+ try {
+
+ if (cfgOverride == null)
+ U.warn(log, "Amazon client configuration override is not set (will use default).");
+
+ if (F.isEmpty(bucketName))
+ throw new IgniteSpiException("Bucket name is null or empty (provide bucket name and restart).");
+
+
+ s3 = createAmazonS3Client();
+
+ if (!doesBucketExist(bucketName)) {
+ try {
+ s3.createBucket(CreateBucketRequest.builder()
+ .bucket(bucketName)
+ .build());
+
+ if (log.isDebugEnabled())
+ log.debug("Created S3 bucket: " + bucketName);
+
+ while (!doesBucketExist(bucketName))
+ try {
+ U.sleep(200);
+ } catch (IgniteInterruptedCheckedException e) {
+ throw new IgniteSpiException("Thread has been interrupted.", e);
+ }
+ } catch (SdkClientException e) {
+ // If the bucket still does not exist, creation genuinely failed; otherwise it was
+ // created concurrently (e.g. by another node) and the error can be ignored.
+ if (!doesBucketExist(bucketName)) {
+ s3 = null;
+
+ throw new IgniteSpiException("Failed to create bucket: " + bucketName, e);
+ }
+ }
+ }
+ } finally {
+ initLatch.countDown();
+ }
+ else {
+ try {
+ U.await(initLatch);
+ } catch (IgniteInterruptedCheckedException e) {
+ throw new IgniteSpiException("Thread has been interrupted.", e);
+ }
+
+ if (s3 == null)
+ throw new IgniteSpiException("Ip finder has not been initialized properly.");
+ }
+ }
+
+ /**
+ * Instantiates an {@code S3Client} instance.
+ *
+ * @return Client instance to use to connect to AWS.
+ */
+ S3Client createAmazonS3Client() {
+ S3ClientBuilder builder = S3Client.builder();
+
+ builder.credentialsProvider(credProvider);
+
+ if (cfgOverride != null) {
+ builder.overrideConfiguration(cfgOverride);
+ }
+
+ if (!F.isEmpty(bucketEndpoint)) {
+ builder.endpointOverride(URI.create(bucketEndpoint));
+ }
+ builder.region(Region.of(awsRegion));
+
+ return builder.build();
+ }
+
+ /**
+ * Sets bucket name for IP finder.
+ *
+ * @param bucketName Bucket name.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = false)
+ public TcpDiscoveryS3IpFinder setBucketName(String bucketName) {
+ this.bucketName = bucketName;
+
+ return this;
+ }
+
+ /**
+ * Sets bucket endpoint for the IP finder. If the endpoint is not set, then the IP finder will go to each region
+ * to find a corresponding bucket. For information about possible endpoint names visit docs.aws.amazon.com.
+ *
+ * @param bucketEndpoint Bucket endpoint, for example, {@code https://s3.us-east-2.amazonaws.com}. The value is
+ * passed to {@link URI#create(String)}, so it should include the scheme.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public TcpDiscoveryS3IpFinder setBucketEndpoint(String bucketEndpoint) {
+ this.bucketEndpoint = bucketEndpoint;
+
+ return this;
+ }
+
+ /**
+ * Sets server-side encryption algorithm for Amazon S3-managed encryption keys. For information about possible
+ * S3-managed encryption keys visit
+ * docs.aws.amazon.com.
+ *
+ * @param sseAlg Server-side encryption algorithm, for example, AES256 or SSES3.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public TcpDiscoveryS3IpFinder setSSEAlgorithm(String sseAlg) {
+ this.sseAlg = sseAlg;
+
+ return this;
+ }
+
+ /**
+ * Sets the AWS SDK client override configuration.
+ *
+ * For details refer to the AWS SDK for Java 2.x documentation.
+ *
+ * @param cfg Amazon client configuration.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public TcpDiscoveryS3IpFinder setClientConfigurationOverride(ClientOverrideConfiguration cfg) {
+ this.cfgOverride = cfg;
+
+ return this;
+ }
+
+ /**
+ * Sets encryption service for client side node address encryption.
+ *
+ * @param encryptionSvc Encryption service.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public TcpDiscoveryS3IpFinder setEncryptionService(EncryptionService encryptionSvc) {
+ this.encryptionSvc = encryptionSvc;
+
+ return this;
+ }
+
+ /**
+ * Sets AWS region the S3 bucket resides in. Defaults to {@code us-east-1}.
+ *
+ * @param region AWS region.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = false)
+ public TcpDiscoveryS3IpFinder setAwsRegion(String region) {
+ this.awsRegion = region;
+
+ return this;
+ }
+
+ /**
+ * Sets AWS credentials provider.
+ *
+ * For details refer to Amazon S3 API reference.
+ *
+ * @param credProvider AWS credentials provider.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = false)
+ public TcpDiscoveryS3IpFinder setAwsCredentialsProvider(AwsCredentialsProvider credProvider) {
+ this.credProvider = credProvider;
+
+ return this;
+ }
+
+ /**
+ * Sets a key prefix for the node addresses. This can be thought of as the sub-folder within the bucket
+ * that will hold the node addresses.
+ *
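+ * For example, a sketch like the following (the prefix value is illustrative) stores entries under keys such as
+ * {@code ignite/cluster-1/192.168.1.136#47500}:
+ * <pre name="code" class="java">
+ * TcpDiscoveryS3IpFinder ipFinder = new TcpDiscoveryS3IpFinder()
+ *     .setBucketName("your-ignite-discovery-bucket")
+ *     .setKeyPrefix("ignite/cluster-1/");
+ * </pre>
+ *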
+ * @param keyPrefix Key prefix for the node addresses.
+ * @return {@code this} for chaining.
+ */
+ @IgniteSpiConfiguration(optional = true)
+ public TcpDiscoveryS3IpFinder setKeyPrefix(String keyPrefix) {
+ this.keyPrefix = keyPrefix;
+
+ return this;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public TcpDiscoveryS3IpFinder setShared(boolean shared) {
+ super.setShared(shared);
+
+ return this;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String toString() {
+ return S.toString(TcpDiscoveryS3IpFinder.class, this, "super", super.toString());
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/AsymmetricKeyEncryptionService.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/AsymmetricKeyEncryptionService.java
new file mode 100644
index 000000000..911ccf3e5
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/AsymmetricKeyEncryptionService.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+import javax.crypto.BadPaddingException;
+import javax.crypto.Cipher;
+import javax.crypto.IllegalBlockSizeException;
+import java.security.Key;
+import java.security.KeyPair;
+
+/**
+ * Provides an implementation of asymmetric encryption to encrypt/decrypt the data.
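+ * <p>
+ * A minimal usage sketch (the RSA key pair generation below is only illustrative):
+ * <pre name="code" class="java">
+ * KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair();
+ *
+ * AsymmetricKeyEncryptionService encSvc = new AsymmetricKeyEncryptionService();
+ * encSvc.setKeyPair(keyPair);
+ * encSvc.init();
+ * </pre>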
+ */
+public class AsymmetricKeyEncryptionService implements EncryptionService {
+ /** Public key. */
+ private Key publicKey;
+
+ /** Private key. */
+ private Key privateKey;
+
+ /** Encryption service. */
+ private Cipher encCipher;
+
+ /** Decryption service. */
+ private Cipher decCipher;
+
+ /**
+ * Sets the public/private key pair.
+ *
+ * @param keyPair Key pair of Public and Private key.
+ */
+ public void setKeyPair(KeyPair keyPair) {
+ if (keyPair.getPublic() == null)
+ throw new IgniteException("Public key was not set / was set to null.");
+
+ if (keyPair.getPrivate() == null)
+ throw new IgniteException("Private key was not set / was set to null.");
+
+ publicKey = keyPair.getPublic();
+ privateKey = keyPair.getPrivate();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void init() throws IgniteException {
+ if (privateKey == null)
+ throw new IgniteException("Private key was not set / was set to null.");
+
+ if (publicKey == null)
+ throw new IgniteException("Public key was not set / was set to null.");
+
+ encCipher = IgniteUtils.createCipher(privateKey, Cipher.ENCRYPT_MODE);
+ decCipher = IgniteUtils.createCipher(publicKey, Cipher.DECRYPT_MODE);
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] encrypt(byte[] data) {
+ if (data == null)
+ throw new IgniteException("Parameter data cannot be null");
+
+ if (encCipher == null)
+ throw new IgniteException("The init() method was not called.");
+
+ try {
+ return encCipher.doFinal(data);
+ }
+ catch (IllegalBlockSizeException | BadPaddingException e) {
+ throw new IgniteException(e);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] decrypt(byte[] data) {
+ if (data == null)
+ throw new IgniteException("Parameter data cannot be null");
+
+ if (decCipher == null)
+ throw new IgniteException("The init() method was not called.");
+
+ try {
+ return decCipher.doFinal(data);
+ }
+ catch (IllegalBlockSizeException | BadPaddingException e) {
+ throw new IgniteException(e);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(AsymmetricKeyEncryptionService.class, this, "super", super.toString());
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/EncryptionService.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/EncryptionService.java
new file mode 100644
index 000000000..cd02ef45d
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/EncryptionService.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+/**
+ * An implementation of this interface provides the ability to encrypt and decrypt data.
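+ * <p>
+ * A minimal usage sketch with the bundled symmetric implementation (the AES key generation below is only
+ * illustrative; manage keys according to your own security requirements):
+ * <pre name="code" class="java">
+ * SecretKey key = KeyGenerator.getInstance("AES").generateKey();
+ *
+ * EncryptionService encSvc = new SymmetricKeyEncryptionService().setSecretKey(key);
+ * encSvc.init();
+ *
+ * byte[] enc = encSvc.encrypt("192.168.1.136#47500".getBytes(StandardCharsets.UTF_8));
+ * byte[] dec = encSvc.decrypt(enc);
+ * </pre>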
+ */
+public interface EncryptionService {
+ /**
+ * Performs initialization. Must be called before {@link EncryptionService#encrypt(byte[])} and
+ * {@link EncryptionService#decrypt(byte[])} are used.
+ */
+ public void init();
+
+ /**
+ * Encrypt the input data.
+ *
+ * @param data Data bytes to be encrypted.
+ * @return The encrypted data bytes.
+ * @throws IllegalArgumentException If the parameter data is null.
+ */
+ public byte[] encrypt(byte[] data);
+
+ /**
+ * Decrypt the input data.
+ *
+ * @param data Encrypted data.
+ * @return Decrypted result.
+ * @throws IllegalArgumentException If the parameter data is null.
+ */
+ public byte[] decrypt(byte[] data);
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/SymmetricKeyEncryptionService.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/SymmetricKeyEncryptionService.java
new file mode 100644
index 000000000..6b006dd5e
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/SymmetricKeyEncryptionService.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+import javax.crypto.BadPaddingException;
+import javax.crypto.Cipher;
+import javax.crypto.IllegalBlockSizeException;
+import java.security.Key;
+
+/**
+ * Service to encrypt data using symmetric key encryption.
+ */
+public class SymmetricKeyEncryptionService implements EncryptionService {
+ /** Secret key. */
+ private Key secretKey;
+
+ /** Cipher, to be used for encryption. */
+ private Cipher encCipher;
+
+ /** Cipher, to be used for decryption. */
+ private Cipher decCipher;
+
+ /**
+ * Sets the key used to encrypt and decrypt the data.
+ *
+ * @param secretKey Secret key.
+ * @return {@code this} for chaining.
+ */
+ public SymmetricKeyEncryptionService setSecretKey(Key secretKey) {
+ this.secretKey = secretKey;
+
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void init() throws IgniteException {
+ if (secretKey == null)
+ throw new IgniteException("Secret key was not set / was set to null.");
+
+ encCipher = IgniteUtils.createCipher(secretKey, Cipher.ENCRYPT_MODE);
+ decCipher = IgniteUtils.createCipher(secretKey, Cipher.DECRYPT_MODE);
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] encrypt(byte[] data) {
+ if (data == null)
+ throw new IgniteException("Parameter [data] cannot be null");
+
+ if (encCipher == null)
+ throw new IgniteException("The init() method was not called.");
+
+ try {
+ return encCipher.doFinal(data);
+ }
+ catch (IllegalBlockSizeException | BadPaddingException e) {
+ throw new IgniteException(e);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] decrypt(byte[] data) {
+ if (data == null)
+ throw new IgniteException("Parameter [data] cannot be null");
+
+ if (decCipher == null)
+ throw new IgniteException("The init() method was not called.");
+
+ try {
+ return decCipher.doFinal(data);
+ }
+ catch (BadPaddingException | IllegalBlockSizeException e) {
+ throw new IgniteException(e);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(SymmetricKeyEncryptionService.class, this, "super", super.toString());
+ }
+}
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/package-info.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/package-info.java
new file mode 100644
index 000000000..b8ce13464
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains Encryption services.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
diff --git a/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/package-info.java b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/package-info.java
new file mode 100644
index 000000000..ab0b0378a
--- /dev/null
+++ b/modules/aws2-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains AWS S3-based IP finder.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointManagerSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointManagerSelfTest.java
new file mode 100644
index 000000000..47ec31b2a
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointManagerSelfTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManagerAbstractSelfTest;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Ignore;
+import org.junit.Test;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+
+/**
+ * Checkpoint manager test using {@link S3CheckpointSpi}.
+ */
+public class S3CheckpointManagerSelfTest extends GridCheckpointManagerAbstractSelfTest {
+ /** {@inheritDoc} */
+ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
+ assertTrue("Unexpected Ignite instance name: " + igniteInstanceName, igniteInstanceName.contains("s3"));
+
+ IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
+
+ S3CheckpointSpi spi = new S3CheckpointSpi();
+
+ AwsCredentials cred = IgniteS3TestConfiguration.getAwsCredentials();
+ spi.setAwsCredentials(cred);
+
+ spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+
+ cfg.setCheckpointSpi(spi);
+
+ return cfg;
+ }
+
+ /**
+ * @throws Exception Thrown if any exception occurs.
+ */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ public void testS3Based() throws Exception {
+ retries = 6;
+
+ doTest("s3");
+ }
+
+ /**
+ * @throws Exception Thrown if any exception occurs.
+ */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ public void testMultiNodeS3Based() throws Exception {
+ retries = 6;
+
+ doMultiNodeTest("s3");
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiConfigSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiConfigSelfTest.java
new file mode 100644
index 000000000..6c7b139df
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiConfigSelfTest.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.testframework.junits.spi.GridSpiAbstractConfigTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.junit.Test;
+
+/**
+ * Grid S3 checkpoint SPI config self test.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiConfigSelfTest extends GridSpiAbstractConfigTest {
+ /**
+ * @throws Exception If failed.
+ */
+ @Test
+ public void testNegativeConfig() throws Exception {
+ checkNegativeSpiProperty(new S3CheckpointSpi(), "awsCredentials", null);
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiSelfTest.java
new file mode 100644
index 000000000..b3d73eb25
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiSelfTest.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.GridTestIoUtils;
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.lang.GridAbsClosure;
+import org.apache.ignite.internal.util.lang.GridAbsClosureX;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.checkpoint.GridCheckpointTestState;
+import org.apache.ignite.testframework.GridTestUtils;
+import org.apache.ignite.testframework.junits.spi.GridSpiAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Ignore;
+import org.junit.Test;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.DeleteObjectRequest;
+import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;
+import software.amazon.awssdk.services.s3.model.ListObjectsV2Response;
+import software.amazon.awssdk.services.s3.paginators.ListObjectsV2Iterable;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.concurrent.ThreadLocalRandom;
+
+/**
+ * Grid S3 checkpoint SPI self test.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+@Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+public class S3CheckpointSpiSelfTest extends GridSpiAbstractTest<S3CheckpointSpi> {
+ /** */
+ private static final int CHECK_POINT_COUNT = 10;
+
+ /** */
+ private static final String KEY_PREFIX = "testCheckpoint";
+
+ /** {@inheritDoc} */
+ @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+ AwsCredentials cred = IgniteS3TestConfiguration.getAwsCredentials();
+
+ spi.setAwsCredentials(cred);
+
+ spi.setBucketNameSuffix(getBucketNameSuffix());
+
+ super.spiConfigure(spi);
+ }
+
+ /**
+ * @throws Exception If error.
+ */
+ @Override protected void afterSpiStopped() throws Exception {
+
+ AwsCredentialsProvider credProvider = IgniteS3TestConfiguration.getAwsCredentialsProvider();
+
+ S3Client s3 = S3Client.builder().credentialsProvider(credProvider).build();
+
+ String bucketName = S3CheckpointSpi.BUCKET_NAME_PREFIX + "unit-test-bucket";
+
+ try {
+ ListObjectsV2Request request = ListObjectsV2Request.builder()
+ .bucket(bucketName)
+ .build();
+ ListObjectsV2Iterable responsePages = s3.listObjectsV2Paginator(request);
+
+ // Iterate through each page of results
+ for (ListObjectsV2Response response : responsePages) {
+ // Delete each object in the current page
+ response.contents().forEach(s3Object -> {
+ s3.deleteObject(DeleteObjectRequest.builder()
+ .bucket(bucketName)
+ .key(s3Object.key())
+ .build());
+ });
+ }
+ }
+ catch (SdkClientException e) {
+ throw new IgniteSpiException("Failed to read checkpoint bucket: " + bucketName, e);
+ }
+ }
+
+ /**
+ * @throws Exception Thrown in case of any errors.
+ */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ public void testSaveLoadRemoveWithoutExpire() throws Exception {
+ String dataPrefix = "Test check point data ";
+
+ // Save states.
+ for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+ GridCheckpointTestState state = new GridCheckpointTestState(dataPrefix + i);
+
+ getSpi().saveCheckpoint(KEY_PREFIX + i, GridTestIoUtils.serializeJdk(state), 0, true);
+ }
+
+ // Load and check states.
+ for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+ final String key = KEY_PREFIX + i;
+
+ assertWithRetries(new GridAbsClosureX() {
+ @Override public void applyx() throws IgniteCheckedException {
+ assertNotNull("Missing checkpoint: " + key,
+ getSpi().loadCheckpoint(key));
+ }
+ });
+
+ // Doing it again as pulling value from repeated assertion is tricky,
+ // and all assertions below shouldn't be retried in case of failure.
+ byte[] serState = getSpi().loadCheckpoint(key);
+
+ GridCheckpointTestState state = GridTestIoUtils.deserializeJdk(serState);
+
+ assertNotNull("Can't load checkpoint state for key: " + key, state);
+ assertEquals("Invalid state loaded [expected='" + dataPrefix + i + "', received='" + state.getData() + "']",
+ dataPrefix + i, state.getData());
+ }
+
+ // Remove states.
+ for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+ final String key = KEY_PREFIX + i;
+
+ assertWithRetries(new GridAbsClosureX() {
+ @Override public void applyx() throws IgniteCheckedException {
+ assertTrue(getSpi().removeCheckpoint(key));
+ }
+ });
+ }
+
+ // Check that states were removed.
+ for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+ final String key = KEY_PREFIX + i;
+
+ assertWithRetries(new GridAbsClosureX() {
+ @Override public void applyx() throws IgniteCheckedException {
+ assertNull(getSpi().loadCheckpoint(key));
+ }
+ });
+ }
+ }
+
+ /**
+ * @throws Exception Thrown in case of any errors.
+ */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ public void testSaveWithExpire() throws Exception {
+ // Save states.
+ for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+ GridCheckpointTestState state = new GridCheckpointTestState("Test check point data " + i + '.');
+
+ getSpi().saveCheckpoint(KEY_PREFIX + i, GridTestIoUtils.serializeJdk(state), 1, true);
+ }
+
+ // For small expiration intervals there is no guarantee that the state will be removed.
+ Thread.sleep(100);
+
+ // Check that states were removed.
+ for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+ final String key = KEY_PREFIX + i;
+
+ assertWithRetries(new GridAbsClosureX() {
+ @Override public void applyx() throws IgniteCheckedException {
+ assertNull("Checkpoint state should not be loaded with key: " + key,
+ getSpi().loadCheckpoint(key));
+ }
+ });
+ }
+ }
+
+ /**
+ * @throws Exception Thrown in case of any errors.
+ */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ public void testDuplicates() throws Exception {
+ int idx1 = 1;
+ int idx2 = 2;
+
+ GridCheckpointTestState state1 = new GridCheckpointTestState(Integer.toString(idx1));
+ GridCheckpointTestState state2 = new GridCheckpointTestState(Integer.toString(idx2));
+
+ getSpi().saveCheckpoint(KEY_PREFIX, GridTestIoUtils.serializeJdk(state1), 0, true);
+ getSpi().saveCheckpoint(KEY_PREFIX, GridTestIoUtils.serializeJdk(state2), 0, true);
+
+ assertWithRetries(new GridAbsClosureX() {
+ @Override public void applyx() throws IgniteCheckedException {
+ assertNotNull(getSpi().loadCheckpoint(KEY_PREFIX));
+ }
+ });
+
+ byte[] serState = getSpi().loadCheckpoint(KEY_PREFIX);
+
+ GridCheckpointTestState state = GridTestIoUtils.deserializeJdk(serState);
+
+ assertNotNull(state);
+ assertEquals(state2, state);
+
+ // Remove.
+ getSpi().removeCheckpoint(KEY_PREFIX);
+
+ assertWithRetries(new GridAbsClosureX() {
+ @Override public void applyx() throws IgniteCheckedException {
+ assertNull(getSpi().loadCheckpoint(KEY_PREFIX));
+ }
+ });
+ }
+
+ /**
+ * Wrapper around {@link GridTestUtils#retryAssert(org.apache.ignite.IgniteLogger, int, long, GridAbsClosure)}.
+ * Provides s3-specific timeouts.
+ * @param assertion Closure with assertion inside.
+ * @throws org.apache.ignite.internal.IgniteInterruptedCheckedException If was interrupted.
+ */
+ private void assertWithRetries(GridAbsClosureX assertion) throws IgniteInterruptedCheckedException {
+ GridTestUtils.retryAssert(log, 6, 5000, assertion);
+ }
+
+ /**
+ * Gets a bucket name suffix. The suffix should be unique per host to allow parallel test runs against a single
+ * bucket. Note that the final bucket name must not exceed 63 characters.
+ *
+ * @return Bucket name suffix.
+ */
+ static String getBucketNameSuffix() {
+ String bucketNameSuffix;
+ try {
+ bucketNameSuffix = IgniteS3TestConfiguration.getBucketName(
+ "unit-test-" + InetAddress.getLocalHost().getHostName().toLowerCase());
+ }
+ catch (UnknownHostException e) {
+ bucketNameSuffix = IgniteS3TestConfiguration.getBucketName(
+ "unit-test-rnd-" + ThreadLocalRandom.current().nextInt(100));
+ }
+
+ return bucketNameSuffix;
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopBucketEndpointSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopBucketEndpointSelfTest.java
new file mode 100644
index 000000000..5e0007525
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopBucketEndpointSelfTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.spi.GridSpiStartStopAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Ignore;
+import org.junit.Test;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+
+/**
+ * Grid S3 checkpoint SPI start stop self test.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiStartStopBucketEndpointSelfTest extends GridSpiStartStopAbstractTest {
+ /** {@inheritDoc} */
+ @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+
+ AwsCredentials cred = IgniteS3TestConfiguration.getAwsCredentials();
+ spi.setAwsCredentials(cred);
+ spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix() + "-e");
+ spi.setBucketEndpoint("s3.us-east-2.amazonaws.com");
+
+ super.spiConfigure(spi);
+ }
+
+ /** {@inheritDoc} */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ @Override public void testStartStop() throws Exception {
+ super.testStartStop();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopSSEAlgorithmSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopSSEAlgorithmSelfTest.java
new file mode 100644
index 000000000..c8ec7d619
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopSSEAlgorithmSelfTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+
+import org.apache.ignite.spi.GridSpiStartStopAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Ignore;
+import org.junit.Test;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+
+/**
+ * Grid S3 checkpoint SPI start stop self test.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiStartStopSSEAlgorithmSelfTest extends GridSpiStartStopAbstractTest {
+ /** {@inheritDoc} */
+ @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+ AwsCredentials cred = IgniteS3TestConfiguration.getAwsCredentials();
+ spi.setAwsCredentials(cred);
+ spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+ spi.setSSEAlgorithm("AES256");
+
+ super.spiConfigure(spi);
+ }
+
+ /** {@inheritDoc} */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ @Override public void testStartStop() throws Exception {
+ super.testStartStop();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopSelfTest.java
new file mode 100644
index 000000000..d479a1a04
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3CheckpointSpiStartStopSelfTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.spi.GridSpiStartStopAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Ignore;
+import org.junit.Test;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+
+/**
+ * Grid S3 checkpoint SPI start stop self test.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiStartStopSelfTest extends GridSpiStartStopAbstractTest {
+ /** {@inheritDoc} */
+ @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+ AwsCredentials cred = IgniteS3TestConfiguration.getAwsCredentials();
+ spi.setAwsCredentials(cred);
+
+ spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+
+ super.spiConfigure(spi);
+ }
+
+ /** {@inheritDoc} */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ @Override public void testStartStop() throws Exception {
+ super.testStartStop();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3SessionCheckpointSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3SessionCheckpointSelfTest.java
new file mode 100644
index 000000000..634df3195
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/S3SessionCheckpointSelfTest.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
+
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.session.GridSessionCheckpointAbstractSelfTest;
+import org.apache.ignite.session.GridSessionCheckpointSelfTest;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Ignore;
+import org.junit.Test;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+
+/**
+ * Grid session checkpoint self test using {@link S3CheckpointSpi}.
+ */
+public class S3SessionCheckpointSelfTest extends GridSessionCheckpointAbstractSelfTest {
+ /**
+ * @throws Exception If failed.
+ */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ public void testS3Checkpoint() throws Exception {
+ IgniteConfiguration cfg = getConfiguration();
+
+ S3CheckpointSpi spi = new S3CheckpointSpi();
+
+ AwsCredentials cred = IgniteS3TestConfiguration.getAwsCredentials();
+ spi.setAwsCredentials(cred);
+
+ spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+
+ cfg.setCheckpointSpi(spi);
+
+ GridSessionCheckpointSelfTest.spi = spi;
+
+ checkCheckpoints(cfg);
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/package-info.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/package-info.java
new file mode 100644
index 000000000..cce240b76
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3_v2/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *
+ * Contains internal tests or test related classes and interfaces.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3_v2;
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAbstractSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAbstractSelfTest.java
new file mode 100644
index 000000000..4a414c1d2
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAbstractSelfTest.java
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt.EncryptionService;
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.jetbrains.annotations.Nullable;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.concurrent.ThreadLocalRandom;
+
+/**
+ * Abstract TcpDiscoveryS3IpFinder to test with different ways of setting AWS credentials.
+ */
+public abstract class TcpDiscoveryS3IpFinderAbstractSelfTest
+ extends TcpDiscoveryIpFinderAbstractSelfTest<TcpDiscoveryS3IpFinder> {
+ /** Bucket endpoint */
+ @Nullable protected String bucketEndpoint;
+
+ /** Server-side encryption algorithm for Amazon S3-managed encryption keys. */
+ @Nullable protected String SSEAlgorithm;
+
+ /** Key prefix of the address. */
+ @Nullable protected String keyPrefix;
+
+ /** Encryption service. */
+ @Nullable protected EncryptionService encryptionSvc;
+
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+ TcpDiscoveryS3IpFinderAbstractSelfTest() throws Exception {
+ }
+
+ /** {@inheritDoc} */
+ @Override protected TcpDiscoveryS3IpFinder ipFinder() throws Exception {
+ TcpDiscoveryS3IpFinder finder = new TcpDiscoveryS3IpFinder();
+
+ resources.inject(finder);
+
+ assert finder.isShared() : "Ip finder should be shared by default.";
+
+ setAwsCredentials(finder);
+ setBucketEndpoint(finder);
+ setRegion(finder);
+ setBucketName(finder);
+ setSSEAlgorithm(finder);
+ setKeyPrefix(finder);
+ setEncryptionService(finder);
+
+ for (int i = 0; i < 5; i++) {
+ Collection<InetSocketAddress> addrs = finder.getRegisteredAddresses();
+
+ if (!addrs.isEmpty())
+ finder.unregisterAddresses(addrs);
+ else
+ return finder;
+
+ U.sleep(1000);
+ }
+
+ if (!finder.getRegisteredAddresses().isEmpty())
+ throw new Exception("Failed to initialize IP finder.");
+
+ return finder;
+ }
+
+ /** {@inheritDoc} */
+ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+ @Test
+ @Override public void testIpFinder() throws Exception {
+ super.testIpFinder();
+ }
+
+ /**
+ * Set AWS credentials into the provided {@code finder}.
+ *
+ * @param finder finder credentials to set into
+ */
+ protected abstract void setAwsCredentials(TcpDiscoveryS3IpFinder finder);
+
+ /**
+ * Set Bucket endpoint into the provided {@code finder}.
+ *
+ * @param finder finder endpoint to set into.
+ */
+ private void setBucketEndpoint(TcpDiscoveryS3IpFinder finder) {
+ finder.setBucketEndpoint(bucketEndpoint);
+ }
+
+ /**
+ * Set server-side encryption algorithm for Amazon S3-managed encryption keys into the provided {@code finder}.
+ *
+ * @param finder finder encryption algorithm to set into.
+ */
+ private void setSSEAlgorithm(TcpDiscoveryS3IpFinder finder) {
+ finder.setSSEAlgorithm(SSEAlgorithm);
+ }
+
+ /**
+ * Set bucket name into the provided {@code finder}.
+ *
+ * @param finder finder bucket name to set into.
+ */
+ protected void setBucketName(TcpDiscoveryS3IpFinder finder) {
+ finder.setBucketName(getBucketName());
+ }
+
+ /**
+ * Set AWS region into the provided {@code finder}.
+ *
+ * @param finder finder region to set into.
+ */
+ protected void setRegion(TcpDiscoveryS3IpFinder finder) {
+ finder.setAwsRegion(getAwsRegion());
+ }
+
+ /**
+ * Set the IP address key prefix into the provided {@code finder}.
+ *
+ * @param finder finder key prefix to set into.
+ */
+ protected void setKeyPrefix(TcpDiscoveryS3IpFinder finder) {
+ finder.setKeyPrefix(keyPrefix);
+ }
+
+ /**
+ * Set encryption service into the provided {@code finder}.
+ *
+ * @param finder finder encryption service to set into.
+ */
+ protected void setEncryptionService(TcpDiscoveryS3IpFinder finder) {
+ finder.setEncryptionService(encryptionSvc);
+ }
+
+ /**
+ * Gets the bucket name. The bucket name should be unique per host to allow parallel test runs against a single
+ * bucket. Note that the final bucket name must not exceed 63 characters.
+ *
+ * @return Bucket name.
+ */
+ static String getBucketName() {
+ String bucketName;
+ try {
+ bucketName = IgniteS3TestConfiguration.getBucketName(
+ "ip-finder-unit-test-" + InetAddress.getLocalHost().getHostName().toLowerCase());
+ }
+ catch (UnknownHostException e) {
+ bucketName = IgniteS3TestConfiguration.getBucketName(
+ "ip-finder-unit-test-rnd-" + ThreadLocalRandom.current().nextInt(100));
+ }
+
+ return bucketName;
+ }
+
+ /**
+ * Gets the AWS region to run tests against ({@code us-west-2} by default).
+ *
+ * @return AWS region.
+ */
+ static String getAwsRegion() {
+ return IgniteS3TestConfiguration.getAwsRegion("us-west-2");
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.java
new file mode 100644
index 000000000..d208d126a
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder test using AWS credentials provider.
+ */
+public class TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+ public TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest() throws Exception {
+ // No-op.
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+ finder.setAwsCredentialsProvider(IgniteS3TestConfiguration.getAwsCredentialsProvider());
+ }
+
+ /** {@inheritDoc} */
+ @Test
+ @Override public void testIpFinder() throws Exception {
+ super.testIpFinder();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.java
new file mode 100644
index 000000000..4809c9852
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder test using AWS credentials.
+ */
+public class TcpDiscoveryS3IpFinderAwsCredentialsSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+ public TcpDiscoveryS3IpFinderAwsCredentialsSelfTest() throws Exception {
+ // No-op.
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+ finder.setAwsCredentialsProvider(IgniteS3TestConfiguration.getAwsCredentialsProvider());
+ }
+
+ /** {@inheritDoc} */
+ @Test
+ @Override public void testIpFinder() throws Exception {
+ super.testIpFinder();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderBucketEndpointSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderBucketEndpointSelfTest.java
new file mode 100644
index 000000000..fe162eedb
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderBucketEndpointSelfTest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder tests bucket endpoint for IP finder.
+ * For information about possible endpoint names visit
+ * docs.aws.amazon.com.
+ */
+public class TcpDiscoveryS3IpFinderBucketEndpointSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+ public TcpDiscoveryS3IpFinderBucketEndpointSelfTest() throws Exception {
+ bucketEndpoint = "https://s3.us-east-2.amazonaws.com";
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+ finder.setAwsCredentialsProvider(IgniteS3TestConfiguration.getAwsCredentialsProvider());
+ }
+
+    /**
+     * @return Bucket name used by this endpoint-specific test.
+     */
+    String testBucketName() {
+        return getBucketName() + "-e";
+    }
+
+ /** {@inheritDoc} */
+ @Override protected void setBucketName(TcpDiscoveryS3IpFinder finder) {
+ super.setBucketName(finder);
+ finder.setBucketName(testBucketName());
+ finder.setAwsRegion("us-east-2");
+ }
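+
+    // The bucket endpoint and the region both point at us-east-2, and a "-e" suffixed bucket name is
+    // used, presumably so this test does not clash with the tests that run against the default-region bucket.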
+
+ /** {@inheritDoc} */
+ @Test
+ @Override public void testIpFinder() throws Exception {
+ super.testIpFinder();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.java
new file mode 100644
index 000000000..e8b73ea42
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.client.DummyS3Client;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt.EncryptionService;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt.MockEncryptionService;
+import org.junit.Test;
+import org.mockito.Mockito;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+
+/**
+ * TcpDiscoveryS3IpFinder tests client side encryption for S3 IP finder.
+ */
+public class TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+    public TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+ finder.setAwsCredentialsProvider(() -> AwsBasicCredentials.create("dummy", "dummy"));
+ }
+
+    /** {@inheritDoc} */
+    @Override protected void setEncryptionService(TcpDiscoveryS3IpFinder finder) {
+ EncryptionService encryptionSvc = MockEncryptionService.instance();
+ encryptionSvc.init();
+ finder.setEncryptionService(encryptionSvc);
+ }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryS3IpFinder ipFinder() {
+ TcpDiscoveryS3IpFinder ipFinder = Mockito.spy(new TcpDiscoveryS3IpFinder());
+
+ Mockito.doReturn(new DummyS3Client()).when(ipFinder).createAmazonS3Client();
+
+ setAwsCredentials(ipFinder);
+ setBucketName(ipFinder);
+ setKeyPrefix(ipFinder);
+ setEncryptionService(ipFinder);
+
+ return ipFinder;
+ }
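+
+    // The spy above only stubs createAmazonS3Client(); the rest of the IP finder logic runs
+    // unchanged against the in-memory DummyS3Client, so this test should not touch real AWS at all.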
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+ resources.inject(finder);
+
+ assert finder.isShared() : "Ip finder should be shared by default.";
+
+ super.testIpFinder();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderKeyPrefixSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderKeyPrefixSelfTest.java
new file mode 100644
index 000000000..58df07272
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderKeyPrefixSelfTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.client.DummyS3Client;
+import org.junit.Test;
+import org.mockito.Mockito;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+
+/**
+ * TcpDiscoveryS3IpFinder test that verifies the key prefix configuration of the IP finder.
+ */
+public class TcpDiscoveryS3IpFinderKeyPrefixSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+    public TcpDiscoveryS3IpFinderKeyPrefixSelfTest() throws Exception {
+        // No-op.
+    }
+
+ /** {@inheritDoc} */
+ @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        finder.setAwsCredentialsProvider(() -> AwsBasicCredentials.create("dummy", "dummy"));
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void setKeyPrefix(TcpDiscoveryS3IpFinder finder) {
+ finder.setKeyPrefix("/test/key/prefix");
+ }
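+
+    // The prefix behaves like a folder path inside the bucket: the IP finder is expected to store
+    // its node-address entries under keys starting with "/test/key/prefix".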
+
+ /** {@inheritDoc} */
+ @Override protected TcpDiscoveryS3IpFinder ipFinder() throws Exception {
+ TcpDiscoveryS3IpFinder ipFinder = Mockito.spy(new TcpDiscoveryS3IpFinder());
+
+ Mockito.doReturn(new DummyS3Client()).when(ipFinder).createAmazonS3Client();
+
+ setAwsCredentials(ipFinder);
+ setBucketName(ipFinder);
+ setKeyPrefix(ipFinder);
+
+ return ipFinder;
+ }
+
+ /** {@inheritDoc} */
+ @Test
+ @Override public void testIpFinder() throws Exception {
+ resources.inject(finder);
+
+ assert finder.isShared() : "Ip finder should be shared by default.";
+
+ super.testIpFinder();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.java
new file mode 100644
index 000000000..ca029a8c3
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
+
+import org.apache.ignite.util.IgniteS3TestConfiguration;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder tests server-side encryption algorithm for Amazon S3-managed encryption keys.
+ * For information about possible S3-managed encryption keys visit
+ * docs.aws.amazon.com.
+ */
+public class TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+ /**
+ * Constructor.
+ *
+ * @throws Exception If any error occurs.
+ */
+ public TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest() throws Exception {
+ SSEAlgorithm = "AES256";
+ }
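+
+    // "AES256" corresponds to SSE-S3, i.e. server-side encryption with Amazon S3-managed keys; the
+    // entries written by the IP finder should then be encrypted at rest by S3 itself, with no
+    // client-side key handling required.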
+
+ /** {@inheritDoc} */
+ @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+ finder.setAwsCredentialsProvider(IgniteS3TestConfiguration.getAwsCredentialsProvider());
+ }
+
+ /** {@inheritDoc} */
+ @Test
+ @Override public void testIpFinder() throws Exception {
+ super.testIpFinder();
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyListObjectsResponseProvider.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyListObjectsResponseProvider.java
new file mode 100644
index 000000000..9d01d43ba
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyListObjectsResponseProvider.java
@@ -0,0 +1,37 @@
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.client;
+
+import software.amazon.awssdk.services.s3.model.*;
+
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Builds fake list-objects responses for {@link DummyS3Client} from an in-memory set of keys.
+ */
+public class DummyListObjectsResponseProvider {
+    /**
+     * @param listObjectsRequest List objects request.
+     * @param allKeys All keys in the fake bucket.
+     * @return Response containing the keys that match the request prefix.
+     */
+    public static ListObjectsResponse from(ListObjectsRequest listObjectsRequest, Set<String> allKeys) {
+        ListObjectsResponse.Builder builder = ListObjectsResponse.builder();
+
+        builder.marker("m1");
+
+        String prefix = listObjectsRequest.prefix();
+
+        Set<String> keys = allKeys;
+
+        if (prefix != null && !prefix.isEmpty())
+            keys = allKeys.stream().filter(key -> key.startsWith(prefix)).collect(Collectors.toSet());
+
+        List<S3Object> s3Objects = keys.stream().map(key -> S3Object.builder().key(key).build()).collect(Collectors.toList());
+
+        builder.contents(s3Objects);
+        builder.isTruncated(true);
+
+        return builder.build();
+    }
+
+    /**
+     * @param listObjectsRequest List objects V2 request.
+     * @param allKeys All keys in the fake bucket.
+     * @return Response containing the keys that match the request prefix.
+     */
+    public static ListObjectsV2Response v2from(ListObjectsV2Request listObjectsRequest, Set<String> allKeys) {
+        ListObjectsV2Response.Builder builder = ListObjectsV2Response.builder();
+
+        String prefix = listObjectsRequest.prefix();
+
+        Set<String> keys = allKeys;
+
+        if (prefix != null && !prefix.isEmpty())
+            keys = allKeys.stream().filter(key -> key.startsWith(prefix)).collect(Collectors.toSet());
+
+        List<S3Object> s3Objects = keys.stream().map(key -> S3Object.builder().key(key).build()).collect(Collectors.toList());
+
+        builder.contents(s3Objects);
+
+        return builder.build();
+    }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyS3Client.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyS3Client.java
new file mode 100644
index 000000000..47724729a
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyS3Client.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.client;
+
+import org.apache.ignite.spi.aws2.S3Utils;
+import software.amazon.awssdk.awscore.exception.AwsServiceException;
+import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.core.exception.SdkException;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.*;
+import java.util.*;
+
+/**
+ * Class to simulate the functionality of {@link S3Client}.
+ */
+public class DummyS3Client implements S3Client {
+    /** Map with bucket names as keys and the set of object keys in each bucket as values. */
+    private final Map<String, Set<String>> objMap;
+
+ /**
+ * Constructor.
+ */
+ public DummyS3Client() {
+ this.objMap = new HashMap<>();
+ }
+
+    /** {@inheritDoc} */
+    @Override public String serviceName() {
+        return "S3";
+    }
+
+    /** {@inheritDoc} */
+    @Override public void close() {
+        // Nothing to close for the in-memory client.
+    }
+
+    /**
+     * Constructor that pre-populates the client with fake data.
+     *
+     * @param objMap Map with bucket names as keys and sets of object keys as values.
+     */
+    public DummyS3Client(Map<String, Set<String>> objMap) {
+        this.objMap = Objects.requireNonNull(objMap, "Object map cannot be null");
+    }
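+
+    // Only the operations the S3 IP finder and its tests actually exercise are implemented here
+    // (bucket existence checks, listing, create, put and delete). All other S3Client methods fall
+    // back to the SDK's default interface implementations, which are expected to throw
+    // UnsupportedOperationException if they are ever called.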
+
+    /** {@inheritDoc} */
+    @Override public ListObjectsResponse listObjects(ListObjectsRequest listObjectsRequest)
+        throws NoSuchBucketException, AwsServiceException, SdkClientException, S3Exception {
+        checkBucketExists(listObjectsRequest.bucket());
+
+        return DummyListObjectsResponseProvider.from(listObjectsRequest, objMap.get(listObjectsRequest.bucket()));
+    }
+
+    /** {@inheritDoc} */
+    @Override public ListObjectsV2Response listObjectsV2(ListObjectsV2Request listObjectsRequest)
+        throws NoSuchBucketException, AwsServiceException, SdkClientException, S3Exception {
+        checkBucketExists(listObjectsRequest.bucket());
+
+        return DummyListObjectsResponseProvider.v2from(listObjectsRequest, objMap.get(listObjectsRequest.bucket()));
+    }
+
+    /** {@inheritDoc} */
+    @Override public CreateBucketResponse createBucket(CreateBucketRequest req)
+        throws BucketAlreadyExistsException, BucketAlreadyOwnedByYouException, AwsServiceException, SdkClientException,
+        S3Exception {
+        String bucketName = req.bucket();
+
+        if (objMap.containsKey(bucketName)) {
+            throw BucketAlreadyExistsException.builder()
+                .message("The specified bucket already exists")
+                .statusCode(409)
+                .build();
+        }
+
+        objMap.put(bucketName, new HashSet<>());
+
+        return CreateBucketResponse.builder().build();
+    }
+
+    /** {@inheritDoc} */
+    @Override public HeadBucketResponse headBucket(HeadBucketRequest headBucketRequest)
+        throws NoSuchBucketException, AwsServiceException, SdkClientException, S3Exception {
+        if (objMap.containsKey(headBucketRequest.bucket()))
+            return HeadBucketResponse.builder().build();
+
+        throw NoSuchBucketException.builder()
+            .statusCode(404)
+            .message("The specified bucket [" + headBucketRequest.bucket() + "] does not exist")
+            .build();
+    }
+
+    /** {@inheritDoc} */
+    @Override public PutObjectResponse putObject(PutObjectRequest putObjectRequest, RequestBody requestBody)
+        throws InvalidRequestException, InvalidWriteOffsetException, TooManyPartsException, EncryptionTypeMismatchException,
+        AwsServiceException, SdkClientException, S3Exception {
+        String bucketName = putObjectRequest.bucket();
+
+        checkBucketExists(bucketName);
+
+        Set<String> keys = objMap.get(bucketName);
+
+        keys.add(putObjectRequest.key());
+
+        return PutObjectResponse.builder().build();
+    }
+
+    /** {@inheritDoc} */
+    @Override public DeleteObjectResponse deleteObject(DeleteObjectRequest deleteObjectRequest)
+        throws AwsServiceException, SdkClientException, S3Exception {
+        String bucketName = deleteObjectRequest.bucket();
+
+        checkBucketExists(bucketName);
+
+        Set<String> keys = objMap.get(bucketName);
+
+        keys.remove(deleteObjectRequest.key());
+
+        return DeleteObjectResponse.builder().build();
+    }
+
+    /**
+     * Check if a bucket exists.
+     *
+     * @param bucketName Bucket name to check.
+     * @throws NoSuchBucketException If the specified bucket does not exist.
+     */
+    private void checkBucketExists(String bucketName) {
+        if (objMap.containsKey(bucketName))
+            return;
+
+        throw NoSuchBucketException.builder()
+            .message("The specified bucket [" + bucketName + "] does not exist")
+            .statusCode(404)
+            .build();
+    }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyS3ClientTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyS3ClientTest.java
new file mode 100644
index 000000000..0cf254d2a
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/DummyS3ClientTest.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.client;
+
+
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Test;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.*;
+
+import java.util.*;
+
+import static org.apache.ignite.spi.aws2.S3Utils.doesBucketExist;
+
+/**
+ * Class to test {@link DummyS3Client}.
+ */
+public class DummyS3ClientTest extends GridCommonAbstractTest {
+    /** Instance of {@link DummyS3Client} to be used for tests. */
+    private S3Client s3;
+
+    /** Holds fake key prefixes. */
+    private Set<String> fakeKeyPrefixSet;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() {
+        fakeKeyPrefixSet = new HashSet<>();
+
+        fakeKeyPrefixSet.add("/test/path/val");
+        fakeKeyPrefixSet.add("/test/val/test/path");
+        fakeKeyPrefixSet.add("/test/test/path/val");
+
+        Map<String, Set<String>> fakeObjMap = new HashMap<>();
+
+        fakeObjMap.put("testBucket", fakeKeyPrefixSet);
+
+        s3 = new DummyS3Client(fakeObjMap);
+    }
+
+ /**
+ * Test cases to check the 'doesBucketExist' method.
+ */
+ @Test
+ public void testDoesBucketExist() {
+ assertTrue("The bucket 'testBucket' should exist", doesBucketExist(s3, "testBucket"));
+ assertFalse("The bucket 'nonExistentBucket' should not exist", doesBucketExist(s3, "nonExistentBucket"));
+ }
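+
+    // doesBucketExist() comes from the module's S3Utils helper; it presumably issues a HeadBucket
+    // request under the hood, which is why DummyS3Client implements headBucket().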
+
+ /**
+ * Test cases for various object listing functions for S3 bucket.
+ */
+    @Test
+    public void testListObjects() {
+        ListObjectsResponse listing = s3.listObjects(ListObjectsRequest.builder().bucket("testBucket").build());
+
+        List<S3Object> summaries = listing.contents();
+
+        assertFalse("'testBucket' contains keys", summaries.isEmpty());
+        assertTrue("'testBucket' contains more keys to fetch", listing.isTruncated());
+
+        for (S3Object s3Object : summaries)
+            assertTrue(fakeKeyPrefixSet.contains(s3Object.key()));
+
+        try {
+            s3.listObjects(ListObjectsRequest.builder().bucket("nonExistentBucket").build());
+
+            fail("A bucket that does not exist must not be listable.");
+        }
+        catch (S3Exception e) {
+            assertTrue(e.getMessage().startsWith("The specified bucket"));
+        }
+    }
+
+ /**
+ * Test cases for various object listing functions for S3 bucket and key prefix.
+ */
+    @Test
+    public void testListObjectsWithAPrefix() {
+        assertThatBucketContain("testBucket", "/test", 3);
+        assertThatBucketContain("testBucket", "/test/path", 1);
+        assertThatBucketContain("testBucket", "/test/path1", 0);
+
+        try {
+            s3.listObjects(ListObjectsRequest.builder().bucket("nonExistentBucket").prefix("/test").build());
+
+            fail("A bucket that does not exist must not be listable.");
+        }
+        catch (S3Exception e) {
+            assertTrue(e.getMessage().contains("The specified bucket"));
+        }
+    }
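+
+    // The expected counts follow from the three fake keys registered in beforeTest(): all of them
+    // start with "/test", exactly one starts with "/test/path", and none start with "/test/path1".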
+
+    /**
+     * Asserts that the given bucket contains the expected number of keys with the given prefix.
+     *
+     * @param testBucket Bucket name.
+     * @param prefix Key prefix.
+     * @param expectedCount Expected number of matching keys.
+     */
+    private void assertThatBucketContain(String testBucket, String prefix, int expectedCount) {
+        ListObjectsResponse listing = s3.listObjects(ListObjectsRequest.builder().bucket(testBucket).prefix(prefix).build());
+
+        List<S3Object> summaries = listing.contents();
+
+        if (expectedCount > 0)
+            assertFalse("'" + testBucket + "' must contain keys with prefix '" + prefix + "'", summaries.isEmpty());
+
+        assertEquals("'" + testBucket + "' contains expected number of keys with prefix '" + prefix + "'",
+            expectedCount, summaries.size());
+
+        for (S3Object s3Object : summaries)
+            assertTrue("Unexpected prefix: " + s3Object.key(), s3Object.key().startsWith(prefix));
+    }
+
+ /**
+ * Test case to check if a bucket is created properly.
+ */
+    @Test
+    public void testCreateBucket() {
+        s3.createBucket(CreateBucketRequest.builder().bucket("testBucket1").build());
+
+        assertTrue("The bucket 'testBucket1' should exist", doesBucketExist(s3, "testBucket1"));
+
+        try {
+            s3.createBucket(CreateBucketRequest.builder().bucket("testBucket1").build());
+
+            fail("Creating a bucket with an existing name must fail.");
+        }
+        catch (BucketAlreadyExistsException e) {
+            assertTrue(e.getMessage().contains("already exist"));
+        }
+    }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/package-info.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/package-info.java
new file mode 100644
index 000000000..9be7807df
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/client/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains internal tests or test related classes and interfaces.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.client;
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/AsymmetricKeyEncryptionServiceTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/AsymmetricKeyEncryptionServiceTest.java
new file mode 100644
index 000000000..0a567e011
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/AsymmetricKeyEncryptionServiceTest.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.nio.charset.StandardCharsets;
+import java.security.*;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Base64;
+
+/**
+ * Contains tests for {@link AsymmetricKeyEncryptionService}.
+ */
+public class AsymmetricKeyEncryptionServiceTest extends GridCommonAbstractTest {
+ /** Asymmetric key encryption service. */
+ private AsymmetricKeyEncryptionService encryptionSvc;
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTest() {
+ try {
+ String algo = "RSA";
+ // Public and private key pair is generated using 'openssl'
+ String publicKeyStr = "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCbuQ7RcOtsHf2oGQ" +
+ "b//cMgfN9kS8tsn21BOAnXwkBN0LwpVXdw1SAfN6fhdJqr4Z585IgF" +
+ "EDOlimoDZ2pXHZ6NfmAot4xkioXlsX+lsSir3gMtPfJhtTFvvnvzgr" +
+ "ZGWVxu0eLBCiuhlUpYNTHlFaiD8C/Qj7eRY+tUagZRskug8QIDAQAB";
+
+ String privateKeyStr = "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAJu5D" +
+ "tFw62wd/agZBv/9wyB832RLy2yfbUE4CdfCQE3QvClVd3DVIB83p+" +
+ "F0mqvhnnzkiAUQM6WKagNnalcdno1+YCi3jGSKheWxf6WxKKveAy0" +
+ "98mG1MW++e/OCtkZZXG7R4sEKK6GVSlg1MeUVqIPwL9CPt5Fj61Rq" +
+ "BlGyS6DxAgMBAAECgYEAj+lILnqitvpIb08hzvYfnCiK8s+xIaN8f" +
+ "qdhQUo9zyw2mCRqC5aK5w6yUYNHZc1OgLFamwNMF5KBQsAR4Ix492" +
+ "1K8ch4fmqtnaD4wlx3euyH1+ZjmagzutlFHKxKOnFuoaWeWJj0RN2" +
+ "f2S3dci2Kh1hkde3PylOgOfKXmz0MfAECQQDMjqEr4KdWnAUwBFgP" +
+ "+48wQufpfWzTt2rR7lDxfWoeoo0BlIPVEgvrjmr3mwcX2/kyZK1tD" +
+ "Hf9BSTI65a9zl4hAkEAwuJ7mmd/emqXCqgIs8qsLaaNnZUfTTyzb4" +
+ "iHgFyh/FEyXeuPN/hyg3Hch2/uA+ZFW+Bc46GSSmzWK4RTJGfI0QJ" +
+ "BAI3tHBhUe+ZUxCinqu4T7SpgEYZoNrzCkwPrJRAYoyt0Pv9sqveH" +
+ "2Otr2f3H+2jrgAAd6FI0B4BvNDGPe/xfleECQHkopP+RaMeKjOyrG" +
+ "v3r+q9G5LQbiaJTIpssnlFHRc3ADTgmwpthcpAVsaziAW+bMXO1QQ" +
+ "qj4Hc0wtG7KpVvkIECQBm72Wh6od+BFeWq2iN7XiXIAgXRRvfVTuD" +
+ "KFM3vYQlszEsTI2YKcCg2Lg1oFoHn/tuRjOajNs6eWz/0BWzfuHY=";
+
+ PublicKey publicKey = KeyFactory.getInstance(algo)
+ .generatePublic(new X509EncodedKeySpec(Base64.getDecoder().decode(publicKeyStr)));
+
+ PrivateKey privateKey = KeyFactory.getInstance(algo)
+ .generatePrivate(new PKCS8EncodedKeySpec(Base64.getDecoder().decode(privateKeyStr)));
+
+ KeyPair keyPair = new KeyPair(publicKey, privateKey);
+
+ encryptionSvc = new AsymmetricKeyEncryptionService();
+ encryptionSvc.setKeyPair(keyPair);
+ encryptionSvc.init();
+ }
+        catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
+            Assert.fail("Failed to reconstruct the RSA key pair: " + e.getMessage());
+        }
+ }
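+
+    // A key pair like the one above can be produced with commands along these lines (a sketch only;
+    // exact flags may vary):
+    //   openssl genrsa -out private.pem 1024
+    //   openssl pkcs8 -topk8 -inform PEM -outform DER -in private.pem -nocrypt | base64
+    //   openssl rsa -in private.pem -pubout -outform DER | base64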
+
+ /**
+ * Test encryption and decryption.
+ */
+ @Test
+ public void testEncryptDecrypt() {
+ byte[] testData = "This is some test data.".getBytes(StandardCharsets.UTF_8);
+
+ byte[] encData = encryptionSvc.encrypt(testData);
+ byte[] decData = encryptionSvc.decrypt(encData);
+
+ Assert.assertArrayEquals(testData, decData);
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/MockEncryptionService.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/MockEncryptionService.java
new file mode 100644
index 000000000..07e672cdd
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/MockEncryptionService.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+import javax.crypto.SecretKey;
+import javax.crypto.spec.SecretKeySpec;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Class to provide a mock implementation of {@link EncryptionService}.
+ */
+public class MockEncryptionService implements EncryptionService {
+ /** Encryption service. */
+ private final EncryptionService encryptionSvc;
+
+ /**
+ * Constructor
+ *
+ * @param encryptionSvc Encryption service.
+ */
+ private MockEncryptionService(EncryptionService encryptionSvc) {
+ this.encryptionSvc = encryptionSvc;
+ }
+
+ /**
+ * @return An instance of this class.
+ */
+ public static MockEncryptionService instance() {
+ SecretKey secretKey = new SecretKeySpec("0000000000000000".getBytes(StandardCharsets.UTF_8), "AES");
+ EncryptionService encryptionSvc = new SymmetricKeyEncryptionService().setSecretKey(secretKey);
+
+ encryptionSvc.init();
+
+ return new MockEncryptionService(encryptionSvc);
+ }
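+
+    // The mock simply delegates to a SymmetricKeyEncryptionService configured with a fixed
+    // 16-byte (AES-128) key, which keeps the tests deterministic without any real key material.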
+
+ /** {@inheritDoc} */
+ @Override public void init() {
+ // Nothing to do
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] encrypt(byte[] payload) {
+ return encryptionSvc.encrypt(payload);
+ }
+
+ /** {@inheritDoc} */
+ @Override public byte[] decrypt(byte[] payload) {
+ return encryptionSvc.decrypt(payload);
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/MockEncryptionServiceTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/MockEncryptionServiceTest.java
new file mode 100644
index 000000000..933e448cb
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/MockEncryptionServiceTest.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Class to test {@link MockEncryptionService}.
+ */
+public class MockEncryptionServiceTest extends GridCommonAbstractTest {
+ /** Mock encryption service. */
+ private MockEncryptionService mockEncryptionSvc;
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTest() {
+ mockEncryptionSvc = MockEncryptionService.instance();
+ }
+
+ /**
+ * Test if the service correctly encrypts and decrypts data.
+ */
+ @Test
+ public void testEncryptDecrypt() {
+ byte[] testStr = "test string".getBytes(StandardCharsets.UTF_8);
+
+ byte[] encData = mockEncryptionSvc.encrypt(testStr);
+ byte[] decData = mockEncryptionSvc.decrypt(encData);
+
+ Assert.assertArrayEquals(testStr, decData);
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/SymmetricKeyEncryptionServiceTest.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/SymmetricKeyEncryptionServiceTest.java
new file mode 100644
index 000000000..3a6ca8ce9
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/SymmetricKeyEncryptionServiceTest.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
+
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+import javax.crypto.SecretKey;
+import javax.crypto.spec.SecretKeySpec;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Class to test {@link SymmetricKeyEncryptionService}.
+ */
+public class SymmetricKeyEncryptionServiceTest extends GridCommonAbstractTest {
+ /** Symmetric key encryption service. */
+ private SymmetricKeyEncryptionService encryptionSvc;
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTest() {
+ byte[] key = "0000000000000000".getBytes(StandardCharsets.UTF_8);
+ SecretKey secretKey = new SecretKeySpec(key, "AES");
+
+ encryptionSvc = new SymmetricKeyEncryptionService().setSecretKey(secretKey);
+ encryptionSvc.init();
+ }
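+
+    // The 16-character ASCII key corresponds to a 128-bit AES key; it is for tests only and has
+    // no security value.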
+
+    /**
+     * Test that encrypted data is decrypted back to the original payload.
+     */
+ @Test
+ public void testEncryptDecrypt() {
+ byte[] testData = "test string".getBytes(StandardCharsets.UTF_8);
+ byte[] encData = encryptionSvc.encrypt(testData);
+ byte[] decData = encryptionSvc.decrypt(encData);
+
+ Assert.assertArrayEquals(testData, decData);
+ }
+}
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/package-info.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/package-info.java
new file mode 100644
index 000000000..0c62f7c3e
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/encrypt/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains internal tests or test related classes and interfaces.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2.encrypt;
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/package-info.java b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/package-info.java
new file mode 100644
index 000000000..4251a9214
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3_v2/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains internal tests or test related classes and interfaces.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3_v2;
diff --git a/modules/aws2-ext/src/test/java/org/apache/ignite/util/IgniteS3TestConfiguration.java b/modules/aws2-ext/src/test/java/org/apache/ignite/util/IgniteS3TestConfiguration.java
new file mode 100644
index 000000000..6204c118d
--- /dev/null
+++ b/modules/aws2-ext/src/test/java/org/apache/ignite/util/IgniteS3TestConfiguration.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.util;
+
+import org.apache.ignite.internal.util.typedef.X;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+
+/**
+ * S3 tests configuration.
+ */
+public final class IgniteS3TestConfiguration {
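+    // A minimal sketch of how the live S3 tests are typically parameterized (assuming a standard
+    // Maven invocation; adjust the command to your build setup):
+    //
+    //   mvn test -pl modules/aws2-ext \
+    //     -Dtest.amazon.access.key=<access-key> \
+    //     -Dtest.amazon.secret.key=<secret-key> \
+    //     -Dtest.s3.bucket.name=my-ignite-test-bucket \
+    //     -Dtest.s3.region=us-west-2
+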
+ /**
+ * @return Access key.
+ */
+ public static String getAccessKey() {
+ return getRequiredEnvVar("test.amazon.access.key");
+ }
+
+    /**
+     * @return Secret key.
+     */
+ public static String getSecretKey() {
+ return getRequiredEnvVar("test.amazon.secret.key");
+ }
+
+ /**
+ * @param dfltBucketName Default bucket name.
+ * @return Bucket name.
+ */
+ public static String getBucketName(final String dfltBucketName) {
+ String val = X.getSystemOrEnv("test.s3.bucket.name");
+
+ return val == null ? dfltBucketName : val;
+ }
+
+    /**
+     * @param dfltRegion Default AWS region.
+     * @return AWS region to run the tests against.
+     */
+    public static String getAwsRegion(final String dfltRegion) {
+ String val = X.getSystemOrEnv("test.s3.region");
+
+ return val == null ? dfltRegion : val;
+ }
+
+    /**
+     * @param name Name of the system property or environment variable.
+     * @return Its value. Fails with an assertion error if the value is not set.
+     */
+    private static String getRequiredEnvVar(String name) {
+        String key = X.getSystemOrEnv(name);
+
+        assert key != null : String.format("System property or environment variable '%s' is not set", name);
+
+        return key;
+    }
+
+    /**
+     * @return Basic AWS credentials built from the configured access and secret keys.
+     */
+    public static AwsBasicCredentials getAwsCredentials() {
+        return AwsBasicCredentials.create(getAccessKey(), getSecretKey());
+    }
+
+    /**
+     * @return Static credentials provider wrapping {@link #getAwsCredentials()}.
+     */
+    public static AwsCredentialsProvider getAwsCredentialsProvider() {
+        return StaticCredentialsProvider.create(getAwsCredentials());
+    }
+}
diff --git a/modules/aws2-ext/src/test/resources/log4j2.xml b/modules/aws2-ext/src/test/resources/log4j2.xml
new file mode 100644
index 000000000..b9318dd15
--- /dev/null
+++ b/modules/aws2-ext/src/test/resources/log4j2.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pom.xml b/pom.xml
index 55f270e82..ddb579180 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,6 +46,7 @@
modules/performance-statistics-ext
modules/cdc-ext
modules/aws-ext
+ modules/aws2-ext
modules/azure-ext
modules/gce-ext
modules/zookeeper-ip-finder-ext