diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index ab40d21d7..d9fdc8b1d 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -2,5 +2,7 @@
*Description of changes:*
+*Target (OCI, Managed Runtime, both):*
+
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 000000000..88f18ea29
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+version: 2
+updates:
+ - package-ecosystem: "maven"
+ directory: "/aws-lambda-java-runtime-interface"
+ schedule:
+ interval: "weekly"
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
\ No newline at end of file
diff --git a/.github/workflows/aws-lambda-java-core.yml b/.github/workflows/aws-lambda-java-core.yml
new file mode 100644
index 000000000..b1bed919f
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-core.yml
@@ -0,0 +1,46 @@
+# This workflow is triggered when there are changes to the aws-lambda-java-core
+# package. It builds the package and the packages that depend on it.
+
+name: Java CI aws-lambda-java-core
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-core/**'
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'aws-lambda-java-core/**'
+ - '.github/workflows/aws-lambda-java-core.yml'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ # Install base module
+ - name: Install core with Maven
+ run: mvn -B install --file aws-lambda-java-core/pom.xml
+
+ # Package modules that depend on base module
+ - name: Package log4j2 with Maven
+ run: mvn -B package --file aws-lambda-java-log4j2/pom.xml
+
+ # Test Runtime Interface Client
+ - name: Run 'pr' target
+ working-directory: ./aws-lambda-java-runtime-interface-client
+ run: make pr
+ env:
+ IS_JAVA_8: true
diff --git a/.github/workflows/aws-lambda-java-events-sdk-transformer.yml b/.github/workflows/aws-lambda-java-events-sdk-transformer.yml
new file mode 100644
index 000000000..1f1f08870
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-events-sdk-transformer.yml
@@ -0,0 +1,39 @@
+# This workflow is triggered when there are changes to the
+# aws-lambda-java-events-sdk-transformer package. It builds the package.
+
+name: Java CI aws-lambda-java-events-sdk-transformer
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-events-sdk-transformer/**'
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'aws-lambda-java-events-sdk-transformer/**'
+ - '.github/workflows/aws-lambda-java-events-sdk-transformer.yml'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ # Install base module
+ - name: Install events with Maven
+ run: mvn -B install --file aws-lambda-java-events/pom.xml
+ # Package target module
+ - name: Package events-sdk-transformer with Maven
+ run: mvn -B package --file aws-lambda-java-events-sdk-transformer/pom.xml
+
diff --git a/.github/workflows/aws-lambda-java-events.yml b/.github/workflows/aws-lambda-java-events.yml
new file mode 100644
index 000000000..2d101018d
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-events.yml
@@ -0,0 +1,42 @@
+# This workflow is triggered when there are changes to the aws-lambda-java-events
+# package. It builds the package and the packages that depend on it.
+
+name: Java CI aws-lambda-java-events
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-events/**'
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'aws-lambda-java-events/**'
+ - '.github/workflows/aws-lambda-java-events.yml'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ # Install base module
+ - name: Install events with Maven
+ run: mvn -B install --file aws-lambda-java-events/pom.xml
+
+ # Package modules that depend on base module
+ - name: Package serialization with Maven
+ run: mvn -B package --file aws-lambda-java-serialization/pom.xml
+ - name: Package events-sdk-transformer with Maven
+ run: mvn -B package --file aws-lambda-java-events-sdk-transformer/pom.xml
+
diff --git a/.github/workflows/aws-lambda-java-log4j2.yml b/.github/workflows/aws-lambda-java-log4j2.yml
new file mode 100644
index 000000000..e9f6a56c1
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-log4j2.yml
@@ -0,0 +1,39 @@
+# This workflow is triggered when there are changes to the
+# aws-lambda-java-log4j2 package. It builds the package.
+
+name: Java CI aws-lambda-java-log4j2
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-log4j2/**'
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'aws-lambda-java-log4j2/**'
+ - '.github/workflows/aws-lambda-java-log4j2.yml'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ # Install base module
+ - name: Install core with Maven
+ run: mvn -B install --file aws-lambda-java-core/pom.xml
+ # Package target module
+ - name: Package log4j2 with Maven
+ run: mvn -B package --file aws-lambda-java-log4j2/pom.xml
+
diff --git a/.github/workflows/aws-lambda-java-profiler.yml b/.github/workflows/aws-lambda-java-profiler.yml
new file mode 100644
index 000000000..a3afe3729
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-profiler.yml
@@ -0,0 +1,78 @@
+name: Run integration tests for aws-lambda-java-profiler
+
+on:
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'experimental/aws-lambda-java-profiler/**'
+ - '.github/workflows/aws-lambda-java-profiler.yml'
+ push:
+ branches: ['*']
+ paths:
+ - 'experimental/aws-lambda-java-profiler/**'
+ - '.github/workflows/aws-lambda-java-profiler.yml'
+
+jobs:
+
+ build:
+ runs-on: ubuntu-latest
+
+ permissions:
+ id-token: write
+ contents: read
+
+ steps:
+ - uses: actions/checkout@v5
+
+ - name: Set up JDK
+ uses: actions/setup-java@v4
+ with:
+ java-version: 21
+ distribution: corretto
+
+ - name: Issue AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-region: ${{ secrets.AWS_REGION_PROFILER_EXTENSION_INTEGRATION_TEST }}
+ role-to-assume: ${{ secrets.AWS_ROLE_PROFILER_EXTENSION_INTEGRATION_TEST }}
+ role-session-name: GitHubActionsRunIntegrationTests
+ role-duration-seconds: 900
+
+ - name: Build layer
+ working-directory: ./experimental/aws-lambda-java-profiler/extension
+ run: ./build_layer.sh
+
+ - name: Publish layer
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/publish_layer.sh
+
+ - name: Create the bucket
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/create_bucket.sh
+
+ - name: Create Java function
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/create_function.sh
+
+ - name: Invoke Java function
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/invoke_function.sh
+
+ - name: Invoke Java Custom Options function
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/invoke_function_custom_options.sh
+
+ - name: Download from s3
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/download_from_s3.sh
+
+ - name: Upload profiles
+ uses: actions/upload-artifact@v4
+ with:
+ name: profiles
+ path: /tmp/s3-artifacts
+
+ - name: cleanup
+ if: always()
+ working-directory: ./experimental/aws-lambda-java-profiler
+ run: ./integration_tests/cleanup.sh
\ No newline at end of file
diff --git a/.github/workflows/aws-lambda-java-serialization.yml b/.github/workflows/aws-lambda-java-serialization.yml
new file mode 100644
index 000000000..13b7e08b0
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-serialization.yml
@@ -0,0 +1,43 @@
+# This workflow is triggered when there are changes to the aws-lambda-java-serialization
+# package. It builds the package and the packages that depend on it.
+
+name: Java CI aws-lambda-java-serialization
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-serialization/**'
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'aws-lambda-java-serialization/**'
+ - '.github/workflows/aws-lambda-java-serialization.yml'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ # Install base module
+ - name: Install events with Maven
+ run: mvn -B install --file aws-lambda-java-events/pom.xml
+
+ # Package and install target module
+ - name: Package serialization with Maven
+ run: mvn -B package install --file aws-lambda-java-serialization/pom.xml
+
+ # Run tests
+ - name: Run tests from aws-lambda-java-tests
+ run: mvn test --file aws-lambda-java-tests/pom.xml
diff --git a/.github/workflows/aws-lambda-java-tests.yml b/.github/workflows/aws-lambda-java-tests.yml
new file mode 100644
index 000000000..720c52c11
--- /dev/null
+++ b/.github/workflows/aws-lambda-java-tests.yml
@@ -0,0 +1,42 @@
+# This workflow is triggered when there are changes to the aws-lambda-java-tests
+# package. It builds the package and the packages that depend on it.
+
+name: Java CI aws-lambda-java-tests
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-tests/**'
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - 'aws-lambda-java-tests/**'
+ - '.github/workflows/aws-lambda-java-tests.yml'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ # Install base module
+ - name: Install events with Maven
+ run: mvn -B install --file aws-lambda-java-events/pom.xml
+ - name: Install serialization with Maven
+ run: mvn -B install --file aws-lambda-java-serialization/pom.xml
+
+ # Package target module
+ - name: Package tests with Maven
+ run: mvn -B package --file aws-lambda-java-tests/pom.xml
+
diff --git a/.github/workflows/repo-sync.yml b/.github/workflows/repo-sync.yml
new file mode 100644
index 000000000..2d97bc868
--- /dev/null
+++ b/.github/workflows/repo-sync.yml
@@ -0,0 +1,39 @@
+name: Repo Sync
+
+on:
+ schedule:
+ - cron: "0 8 * * 1-5" # At 08:00 on every day-of-week from Monday through Friday
+ pull_request:
+ branches: [ '*' ]
+ paths:
+ - '.github/workflows/repo-sync.yml'
+ workflow_dispatch:
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ repo-sync:
+ name: Repo Sync
+ runs-on: ubuntu-latest
+ env:
+ IS_CONFIGURED: ${{ secrets.SOURCE_REPO != '' }}
+ steps:
+ - uses: actions/checkout@v5
+ if: ${{ env.IS_CONFIGURED == 'true' }}
+ - uses: repo-sync/github-sync@v2
+ name: Sync repo to branch
+ if: ${{ env.IS_CONFIGURED == 'true' }}
+ with:
+ source_repo: ${{ secrets.SOURCE_REPO }}
+ source_branch: main
+ destination_branch: ${{ secrets.INTERMEDIATE_BRANCH }}
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ - uses: repo-sync/pull-request@v2
+ name: Create pull request
+ if: ${{ env.IS_CONFIGURED == 'true' }}
+ with:
+ source_branch: ${{ secrets.INTERMEDIATE_BRANCH }}
+ destination_branch: main
+ github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/runtime-interface-client_merge_to_main.yml b/.github/workflows/runtime-interface-client_merge_to_main.yml
new file mode 100644
index 000000000..3560207f3
--- /dev/null
+++ b/.github/workflows/runtime-interface-client_merge_to_main.yml
@@ -0,0 +1,95 @@
+# This workflow is triggered on merge to the main branch when the
+# aws-lambda-java-runtime-interface-client package has changed.
+#
+# It publishes artifacts to the CodeArtifact repository specified by properties defined in GitHub repo secrets
+# (CODE_ARTIFACT_REPO_ACCOUNT, AWS_REGION, CODE_ARTIFACT_REPO_NAME, CODE_ARTIFACT_DOMAIN)
+# and assumes the role specified by AWS_ROLE.
+#
+# Prerequisite setup:
+# https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services
+
+name: Publish artifact for aws-lambda-java-runtime-interface-client
+
+on:
+ push:
+ branches: [ main ]
+ paths:
+ - 'aws-lambda-java-runtime-interface-client/**'
+ - '.github/workflows/runtime-interface-client_*.yml'
+ workflow_dispatch:
+
+jobs:
+
+ publish:
+ runs-on: ubuntu-latest
+
+ permissions:
+ id-token: write
+ contents: read
+
+ steps:
+ - uses: actions/checkout@v5
+
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v4
+ with:
+ java-version: 8
+ distribution: corretto
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ install: true
+
+ - name: Available buildx platforms
+ run: echo ${{ steps.buildx.outputs.platforms }}
+
+ - name: Build and install serialization dependency locally
+ working-directory: ./aws-lambda-java-serialization
+ run: mvn clean install
+
+ - name: Test Runtime Interface Client xplatform build - Run 'build' target
+ working-directory: ./aws-lambda-java-runtime-interface-client
+ run: make build
+ env:
+ IS_JAVA_8: true
+
+ - name: Issue AWS credentials
+ if: env.ENABLE_SNAPSHOT != null
+ env:
+ ENABLE_SNAPSHOT: ${{ secrets.ENABLE_SNAPSHOT }}
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-region: ${{ secrets.AWS_REGION }}
+ role-to-assume: ${{ secrets.AWS_ROLE }}
+ role-session-name: GitHubActionsPublishPackage
+ role-duration-seconds: 900
+
+ - name: Prepare codeartifact properties
+ if: env.ENABLE_SNAPSHOT != null
+ env:
+ ENABLE_SNAPSHOT: ${{ secrets.ENABLE_SNAPSHOT }}
+ working-directory: ./aws-lambda-java-runtime-interface-client/ric-dev-environment
+ run: |
+ cat <
* Logging will not be done: *
+ * See Pre Token Generation Lambda Trigger
+ */
+@Data
+@EqualsAndHashCode(callSuper = true)
+@NoArgsConstructor
+@ToString(callSuper = true)
+public class CognitoUserPoolPreTokenGenerationEventV2 extends CognitoUserPoolEvent {
+ /**
+ * The request from the Amazon Cognito service.
+ */
+ private Request request;
+
+ /**
+ * The response from your Lambda trigger.
+ */
+ private Response response;
+
+ @Builder(setterPrefix = "with")
+ public CognitoUserPoolPreTokenGenerationEventV2(
+ String version,
+ String triggerSource,
+ String region,
+ String userPoolId,
+ String userName,
+ CallerContext callerContext,
+ Request request,
+ Response response) {
+ super(version, triggerSource, region, userPoolId, userName, callerContext);
+ this.request = request;
+ this.response = response;
+ }
+
+ @Data
+ @EqualsAndHashCode(callSuper = true)
+ @NoArgsConstructor
+ @ToString(callSuper = true)
+ public static class Request extends CognitoUserPoolEvent.Request {
+
+ private String[] scopes;
+ private GroupConfiguration groupConfiguration;
+ private Map
+ * "S": "Hello"
+ */
+ public AttributeValue(String s) {
+ setS(s);
+ }
+
+ /**
+ * Constructs a new DynamodbAttributeValue object. Callers should use the setter or fluent setter (with...) methods to
+ * initialize any additional object members.
+ *
+ * @param sS
+ * An attribute of type String Set. For example:
+ *
+ * "SS": ["Giraffe", "Hippo" ,"Zebra"]
+ */
+ public AttributeValue(java.util.List<String> sS) {
+ setSS(sS);
+ }
+
+ /**
+ * An attribute of type String. For example:
+ *
+ * "S": "Hello"
+ *
+ * @param s
+ * An attribute of type String. For example:
+ * "S": "Hello"
+ */
+
+ public void setS(String s) {
+ this.s = s;
+ }
+
+ /**
+ *
+ * An attribute of type String. For example:
+ *
+ * "S": "Hello"
+ *
+ * @return An attribute of type String. For example:
+ * "S": "Hello"
+ */
+
+ public String getS() {
+ return this.s;
+ }
+
+ /**
+ *
+ * An attribute of type String. For example:
+ *
+ * "S": "Hello"
+ *
+ * @param s
+ * An attribute of type String. For example:
+ * "S": "Hello"
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withS(String s) {
+ setS(s);
+ return this;
+ }
+
+ /**
+ *
+ * An attribute of type Number. For example:
+ *
+ * "N": "123.45"
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ *
+ * @param n
+ * An attribute of type Number. For example:
+ *
+ * "N": "123.45"
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ */
+
+ public void setN(String n) {
+ this.n = n;
+ }
+
+ /**
+ *
+ * An attribute of type Number. For example:
+ *
+ * "N": "123.45"
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ *
+ * @return An attribute of type Number. For example:
+ *
+ * "N": "123.45"
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages
+ * and libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ */
+
+ public String getN() {
+ return this.n;
+ }
+
+ /**
+ *
+ * An attribute of type Number. For example:
+ *
+ * "N": "123.45"
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ *
+ * @param n
+ * An attribute of type Number. For example:
+ *
+ * "N": "123.45"
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withN(String n) {
+ setN(n);
+ return this;
+ }
+
+ /**
+ *
+ * An attribute of type Binary. For example:
+ *
+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"
+ *
+ * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
+ * Users of the SDK should not perform Base64 encoding on this field.
+ *
+ * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
+ * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
+ * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
+ * major version of the SDK.
+ *
+ * @param b
+ * An attribute of type Binary. For example:
+ *
+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"
+ */
+
+ public void setB(java.nio.ByteBuffer b) {
+ this.b = b;
+ }
+
+ /**
+ *
+ * An attribute of type Binary. For example:
+ *
+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"
+ *
+ * {@code ByteBuffer}s are stateful. Calling their {@code get} methods changes their {@code position}. We recommend
+ * using {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view of the buffer with an independent
+ * {@code position}, and calling {@code get} methods on this rather than directly on the returned {@code ByteBuffer}.
+ * Doing so will ensure that anyone else using the {@code ByteBuffer} will not be affected by changes to the
+ * {@code position}.
+ *
+ * @return An attribute of type Binary. For example:
+ *
+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"
+ */
+
+ public java.nio.ByteBuffer getB() {
+ return this.b;
+ }
+
+ /**
+ *
+ * An attribute of type Binary. For example:
+ *
+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"
+ *
+ * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
+ * Users of the SDK should not perform Base64 encoding on this field.
+ *
+ * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
+ * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
+ * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
+ * major version of the SDK.
+ *
+ * @param b
+ * An attribute of type Binary. For example:
+ *
+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withB(java.nio.ByteBuffer b) {
+ setB(b);
+ return this;
+ }
+
+ /**
+ *
+ * An attribute of type String Set. For example: + *
+ *
+ * "SS": ["Giraffe", "Hippo" ,"Zebra"]
+ *
+ *
+ * An attribute of type String Set. For example:
+ *
+ * "SS": ["Giraffe", "Hippo" ,"Zebra"]
+ */
+
+ public java.util.List"SS": ["Giraffe", "Hippo" ,"Zebra"]
+ *
+ *
+ * An attribute of type String Set. For example:
+ *
+ *
+ * NOTE: This method appends the values to the existing list (if any). Use
+ * {@link #setSS(java.util.Collection)} or {@link #withSS(java.util.Collection)} if you want to override the
+ * existing values.
+ * "SS": ["Giraffe", "Hippo" ,"Zebra"]
+ */
+
+ public void setSS(java.util.Collection"SS": ["Giraffe", "Hippo" ,"Zebra"]
+ *
+ *
+ * An attribute of type String Set. For example:
+ *
+ * "SS": ["Giraffe", "Hippo" ,"Zebra"]
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withSS(String... sS) {
+ if (this.sS == null) {
+ setSS(new java.util.ArrayList"SS": ["Giraffe", "Hippo" ,"Zebra"]
+ *
+ *
+ * An attribute of type Number Set. For example:
+ *
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ * "SS": ["Giraffe", "Hippo" ,"Zebra"]
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withSS(java.util.Collection"NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages
+ * and libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ */
+
+ public java.util.List
+ * An attribute of type Number Set. For example:
+ *
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ */
+
+ public void setNS(java.util.Collection
+ * An attribute of type Number Set. For example:
+ *
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ *
+ * NOTE: This method appends the values to the existing list (if any). Use
+ * {@link #setNS(java.util.Collection)} or {@link #withNS(java.util.Collection)} if you want to override the
+ * existing values.
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withNS(String... nS) {
+ if (this.nS == null) {
+ setNS(new java.util.ArrayList
+ * An attribute of type Number Set. For example:
+ *
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * "NS": ["42.2", "-19", "7.5", "3.14"]
+ *
+ * Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and
+ * libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withNS(java.util.Collection
+ * An attribute of type Binary Set. For example:
+ *
+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ *
+ *
+ * An attribute of type Binary Set. For example:
+ *
+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ */
+
+ public java.util.List"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ *
+ *
+ * An attribute of type Binary Set. For example:
+ *
+ *
+ * NOTE: This method appends the values to the existing list (if any). Use
+ * {@link #setBS(java.util.Collection)} or {@link #withBS(java.util.Collection)} if you want to override the
+ * existing values.
+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ */
+
+ public void setBS(java.util.Collection"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ *
+ *
+ * An attribute of type Binary Set. For example:
+ *
+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withBS(java.nio.ByteBuffer... bS) {
+ if (this.bS == null) {
+ setBS(new java.util.ArrayList"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ *
+ *
+ * An attribute of type Map. For example:
+ *
+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withBS(java.util.Collection"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
+ *
+ *
+ * An attribute of type Map. For example:
+ *
+ * "M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
+ */
+
+ public java.util.Map"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
+ *
+ *
+ * An attribute of type Map. For example:
+ *
+ * "M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
+ */
+
+ public void setM(java.util.Map"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
+ *
+ *
+ * An attribute of type List. For example:
+ *
+ * "M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withM(java.util.Map"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ *
+ *
+ * An attribute of type List. For example:
+ *
+ * "L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ */
+
+ public java.util.List"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ *
+ *
+ * An attribute of type List. For example:
+ *
+ *
+ * NOTE: This method appends the values to the existing list (if any). Use
+ * {@link #setL(java.util.Collection)} or {@link #withL(java.util.Collection)} if you want to override the existing
+ * values.
+ * "L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ */
+
+ public void setL(java.util.Collection"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ *
+ *
+ * An attribute of type List. For example:
+ *
+ * "L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withL(AttributeValue... l) {
+ if (this.l == null) {
+ setL(new java.util.ArrayList"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ *
+ *
+ * An attribute of type Null. For example:
+ *
+ * "L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N", "3.14159"}]
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withL(java.util.Collection"NULL": true
+ *
+ * "NULL": true
+ */
+
+ public void setNULL(Boolean nULLValue) {
+ this.nULLValue = nULLValue;
+ }
+
+ /**
+ *
+ * An attribute of type Null. For example:
+ *
+ * "NULL": true
+ *
+ * @return An attribute of type Null. For example:
+ * "NULL": true
+ */
+
+ public Boolean getNULL() {
+ return this.nULLValue;
+ }
+
+ /**
+ *
+ * An attribute of type Null. For example:
+ *
+ * "NULL": true
+ *
+ * @param nULLValue
+ * An attribute of type Null. For example:
+ * "NULL": true
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withNULL(Boolean nULLValue) {
+ setNULL(nULLValue);
+ return this;
+ }
+
+ /**
+ *
+ * An attribute of type Null. For example:
+ *
+ * "NULL": true
+ *
+ * @return An attribute of type Null. For example:
+ * "NULL": true
+ */
+
+ public Boolean isNULL() {
+ return this.nULLValue;
+ }
+
+ /**
+ *
+ * An attribute of type Boolean. For example:
+ *
+ * "BOOL": true
+ *
+ * @param bOOL
+ * An attribute of type Boolean. For example:
+ * "BOOL": true
+ */
+
+ public void setBOOL(Boolean bOOL) {
+ this.bOOL = bOOL;
+ }
+
+ /**
+ *
+ * An attribute of type Boolean. For example:
+ *
+ * "BOOL": true
+ *
+ * @return An attribute of type Boolean. For example:
+ * "BOOL": true
+ */
+
+ public Boolean getBOOL() {
+ return this.bOOL;
+ }
+
+ /**
+ *
+ * An attribute of type Boolean. For example:
+ *
+ * "BOOL": true
+ *
+ * @param bOOL
+ * An attribute of type Boolean. For example:
+ * "BOOL": true
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+
+ public AttributeValue withBOOL(Boolean bOOL) {
+ setBOOL(bOOL);
+ return this;
+ }
+
+ /**
+ *
+ * An attribute of type Boolean. For example:
+ *
+ * "BOOL": true
+ *
+ * @return An attribute of type Boolean. For example:
+ * "BOOL": true
+ */
+
+ public Boolean isBOOL() {
+ return this.bOOL;
+ }
+
+ /**
+ * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
+ * redacted from this string using a placeholder value.
+ *
+ * @return A string representation of this object.
+ *
+ * @see Object#toString()
+ */
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("{");
+ if (getS() != null)
+ sb.append("S: ").append(getS()).append(",");
+ if (getN() != null)
+ sb.append("N: ").append(getN()).append(",");
+ if (getB() != null)
+ sb.append("B: ").append(getB()).append(",");
+ if (getSS() != null)
+ sb.append("SS: ").append(getSS()).append(",");
+ if (getNS() != null)
+ sb.append("NS: ").append(getNS()).append(",");
+ if (getBS() != null)
+ sb.append("BS: ").append(getBS()).append(",");
+ if (getM() != null)
+ sb.append("M: ").append(getM()).append(",");
+ if (getL() != null)
+ sb.append("L: ").append(getL()).append(",");
+ if (getNULL() != null)
+ sb.append("NULL: ").append(getNULL()).append(",");
+ if (getBOOL() != null)
+ sb.append("BOOL: ").append(getBOOL());
+ sb.append("}");
+ return sb.toString();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+
+ if (obj instanceof AttributeValue == false)
+ return false;
+ AttributeValue other = (AttributeValue) obj;
+ if (other.getS() == null ^ this.getS() == null)
+ return false;
+ if (other.getS() != null && other.getS().equals(this.getS()) == false)
+ return false;
+ if (other.getN() == null ^ this.getN() == null)
+ return false;
+ if (other.getN() != null && other.getN().equals(this.getN()) == false)
+ return false;
+ if (other.getB() == null ^ this.getB() == null)
+ return false;
+ if (other.getB() != null && other.getB().equals(this.getB()) == false)
+ return false;
+ if (other.getSS() == null ^ this.getSS() == null)
+ return false;
+ if (other.getSS() != null && other.getSS().equals(this.getSS()) == false)
+ return false;
+ if (other.getNS() == null ^ this.getNS() == null)
+ return false;
+ if (other.getNS() != null && other.getNS().equals(this.getNS()) == false)
+ return false;
+ if (other.getBS() == null ^ this.getBS() == null)
+ return false;
+ if (other.getBS() != null && other.getBS().equals(this.getBS()) == false)
+ return false;
+ if (other.getM() == null ^ this.getM() == null)
+ return false;
+ if (other.getM() != null && other.getM().equals(this.getM()) == false)
+ return false;
+ if (other.getL() == null ^ this.getL() == null)
+ return false;
+ if (other.getL() != null && other.getL().equals(this.getL()) == false)
+ return false;
+ if (other.getNULL() == null ^ this.getNULL() == null)
+ return false;
+ if (other.getNULL() != null && other.getNULL().equals(this.getNULL()) == false)
+ return false;
+ if (other.getBOOL() == null ^ this.getBOOL() == null)
+ return false;
+ if (other.getBOOL() != null && other.getBOOL().equals(this.getBOOL()) == false)
+ return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int hashCode = 1;
+
+ hashCode = prime * hashCode + ((getS() == null) ? 0 : getS().hashCode());
+ hashCode = prime * hashCode + ((getN() == null) ? 0 : getN().hashCode());
+ hashCode = prime * hashCode + ((getB() == null) ? 0 : getB().hashCode());
+ hashCode = prime * hashCode + ((getSS() == null) ? 0 : getSS().hashCode());
+ hashCode = prime * hashCode + ((getNS() == null) ? 0 : getNS().hashCode());
+ hashCode = prime * hashCode + ((getBS() == null) ? 0 : getBS().hashCode());
+ hashCode = prime * hashCode + ((getM() == null) ? 0 : getM().hashCode());
+ hashCode = prime * hashCode + ((getL() == null) ? 0 : getL().hashCode());
+ hashCode = prime * hashCode + ((getNULL() == null) ? 0 : getNULL().hashCode());
+ hashCode = prime * hashCode + ((getBOOL() == null) ? 0 : getBOOL().hashCode());
+ return hashCode;
+ }
+
+ @Override
+ public AttributeValue clone() {
+ try {
+ return (AttributeValue) super.clone();
+ } catch (CloneNotSupportedException e) {
+ throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
+ }
+ }
+
+}
\ No newline at end of file
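For reviewers skimming the AttributeValue model added above, here is a minimal usage sketch of its fluent setters. It is illustrative only: it assumes the class lives in the same events.models.dynamodb package as Identity and Record below, and that a no-argument constructor exists alongside the two convenience constructors shown; the AttributeValueSketch class itself is hypothetical scaffolding.

import com.amazonaws.services.lambda.runtime.events.models.dynamodb.AttributeValue;

import java.util.HashMap;
import java.util.Map;

public class AttributeValueSketch {
    public static void main(String[] args) {
        // Scalar attributes: the String constructor is shorthand for withS(...),
        // and numbers are carried as strings (see the "N" Javadoc above).
        AttributeValue name = new AttributeValue("Joe");
        AttributeValue age = new AttributeValue().withN("35"); // assumes the default constructor

        // Nested structures reuse AttributeValue for map values and set elements.
        Map<String, AttributeValue> item = new HashMap<>();
        item.put("Name", name);
        item.put("Age", age);
        AttributeValue profile = new AttributeValue().withM(item);

        AttributeValue tags = new AttributeValue().withSS("alpha", "beta");
        AttributeValue flag = new AttributeValue().withBOOL(true);

        // toString() prints only the fields that were set, e.g. {S: Joe,}
        System.out.println(profile);
        System.out.println(tags + " " + flag);
    }
}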
diff --git a/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/Identity.java b/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/Identity.java
new file mode 100644
index 000000000..12b2fbba1
--- /dev/null
+++ b/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/Identity.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
+ * the License. A copy of the License is located at
+ *
+ * http://aws.amazon.com/apache2.0
+ *
+ * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+ * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ */
+package com.amazonaws.services.lambda.runtime.events.models.dynamodb;
+
+import java.io.Serializable;
+
+/**
+ *
+ * Contains details about the type of identity that made the request. + *
+ * + * @see AWS API + * Documentation + */ +public class Identity implements Serializable, Cloneable { + + /** + *+ * A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + *
+ */ + private String principalId; + + /** + *+ * The type of the identity. For Time To Live, the type is "Service". + *
+ */ + private String type; + + /** + *+ * A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + *
+ * + * @param principalId + * A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + */ + public void setPrincipalId(String principalId) { + this.principalId = principalId; + } + + /** + *+ * A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + *
+ * + * @return A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + */ + public String getPrincipalId() { + return this.principalId; + } + + /** + *+ * A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + *
+ * + * @param principalId + * A unique identifier for the entity that made the call. For Time To Live, the principalId is + * "dynamodb.amazonaws.com". + * @return Returns a reference to this object so that method calls can be chained together. + */ + public Identity withPrincipalId(String principalId) { + setPrincipalId(principalId); + return this; + } + + /** + *+ * The type of the identity. For Time To Live, the type is "Service". + *
+ * + * @param type + * The type of the identity. For Time To Live, the type is "Service". + */ + public void setType(String type) { + this.type = type; + } + + /** + *+ * The type of the identity. For Time To Live, the type is "Service". + *
+ * + * @return The type of the identity. For Time To Live, the type is "Service". + */ + public String getType() { + return this.type; + } + + /** + *+ * The type of the identity. For Time To Live, the type is "Service". + *
+ * + * @param type + * The type of the identity. For Time To Live, the type is "Service". + * @return Returns a reference to this object so that method calls can be chained together. + */ + public Identity withType(String type) { + setType(type); + return this; + } + + /** + * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be + * redacted from this string using a placeholder value. + * + * @return A string representation of this object. + * + * @see Object#toString() + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("{"); + if (getPrincipalId() != null) + sb.append("PrincipalId: ").append(getPrincipalId()).append(","); + if (getType() != null) + sb.append("Type: ").append(getType()); + sb.append("}"); + return sb.toString(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + + if (obj instanceof Identity == false) + return false; + Identity other = (Identity) obj; + if (other.getPrincipalId() == null ^ this.getPrincipalId() == null) + return false; + if (other.getPrincipalId() != null && other.getPrincipalId().equals(this.getPrincipalId()) == false) + return false; + if (other.getType() == null ^ this.getType() == null) + return false; + if (other.getType() != null && other.getType().equals(this.getType()) == false) + return false; + return true; + } + + @Override + public int hashCode() { + final int prime = 31; + int hashCode = 1; + + hashCode = prime * hashCode + ((getPrincipalId() == null) ? 0 : getPrincipalId().hashCode()); + hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode()); + return hashCode; + } + + @Override + public Identity clone() { + try { + return (Identity) super.clone(); + } catch (CloneNotSupportedException e) { + throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); + } + } + +} diff --git a/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/OperationType.java b/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/OperationType.java new file mode 100644 index 000000000..8d5574ee1 --- /dev/null +++ b/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/OperationType.java @@ -0,0 +1,54 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +package com.amazonaws.services.lambda.runtime.events.models.dynamodb; + +public enum OperationType { + + INSERT("INSERT"), + MODIFY("MODIFY"), + REMOVE("REMOVE"); + + private String value; + + private OperationType(String value) { + this.value = value; + } + + @Override + public String toString() { + return this.value; + } + + /** + * Use this in place of valueOf. 
+ * + * @param value + * real value + * @return OperationType corresponding to the value + * + * @throws IllegalArgumentException + * If the specified value does not map to one of the known values in this enum. + */ + public static OperationType fromValue(String value) { + if (value == null || "".equals(value)) { + throw new IllegalArgumentException("Value cannot be null or empty!"); + } + + for (OperationType enumEntry : OperationType.values()) { + if (enumEntry.toString().equals(value)) { + return enumEntry; + } + } + throw new IllegalArgumentException("Cannot create enum from " + value + " value!"); + } +} \ No newline at end of file diff --git a/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/Record.java b/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/Record.java new file mode 100644 index 000000000..81065811f --- /dev/null +++ b/aws-lambda-java-events/src/main/java/com/amazonaws/services/lambda/runtime/events/models/dynamodb/Record.java @@ -0,0 +1,801 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +package com.amazonaws.services.lambda.runtime.events.models.dynamodb; + +import java.io.Serializable; + +/** + *+ * A description of a unique event within a stream. + *
+ * + * @see AWS API + * Documentation + */ +public class Record implements Serializable, Cloneable { + + /** + *+ * A globally unique identifier for the event that was recorded in this stream record. + *
+ */ + private String eventID; + /** + *+ * The type of data modification that was performed on the DynamoDB table: + *
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * The version number of the stream record format. This number is updated whenever the structure of
+ * Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as this
+ * number is subject to change at any time. In general, eventVersion will only increase as the
+ * low-level DynamoDB Streams API evolves.
+ *
+ * The AWS service from which the stream record originated. For DynamoDB Streams, this is aws:dynamodb.
+ *
+ * The region in which the GetRecords request was received.
+ *
+ * The main body of the stream record, containing all of the DynamoDB-specific fields. + *
+ */ + private StreamRecord dynamodb; + /** + *+ * Items that are deleted by the Time to Live process after expiration have the following fields: + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+ *+ * A globally unique identifier for the event that was recorded in this stream record. + *
+ * + * @param eventID + * A globally unique identifier for the event that was recorded in this stream record. + */ + public void setEventID(String eventID) { + this.eventID = eventID; + } + + /** + *+ * A globally unique identifier for the event that was recorded in this stream record. + *
+ * + * @return A globally unique identifier for the event that was recorded in this stream record. + */ + public String getEventID() { + return this.eventID; + } + + /** + *+ * A globally unique identifier for the event that was recorded in this stream record. + *
+ * + * @param eventID + * A globally unique identifier for the event that was recorded in this stream record. + * @return Returns a reference to this object so that method calls can be chained together. + */ + public Record withEventID(String eventID) { + setEventID(eventID); + return this; + } + + /** + *+ * The type of data modification that was performed on the DynamoDB table: + *
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * The type of data modification that was performed on the DynamoDB table: + *
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * The type of data modification that was performed on the DynamoDB table: + *
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * The type of data modification that was performed on the DynamoDB table: + *
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * The type of data modification that was performed on the DynamoDB table: + *
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * INSERT - a new item was added to the table.
+ *
+ * MODIFY - one or more of an existing item's attributes were modified.
+ *
+ * REMOVE - the item was deleted from the table
+ *
+ * The version number of the stream record format. This number is updated whenever the structure of
+ * Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as this
+ * number is subject to change at any time. In general, eventVersion will only increase as the
+ * low-level DynamoDB Streams API evolves.
+ *
Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as
+ * this number is subject to change at any time. In general, eventVersion will only increase as
+ * the low-level DynamoDB Streams API evolves.
+ */
+ public void setEventVersion(String eventVersion) {
+ this.eventVersion = eventVersion;
+ }
+
+ /**
+ *
+ * The version number of the stream record format. This number is updated whenever the structure of
+ * Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as this
+ * number is subject to change at any time. In general, eventVersion will only increase as the
+ * low-level DynamoDB Streams API evolves.
+ *
Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as
+ * this number is subject to change at any time. In general, eventVersion will only increase as
+ * the low-level DynamoDB Streams API evolves.
+ */
+ public String getEventVersion() {
+ return this.eventVersion;
+ }
+
+ /**
+ *
+ * The version number of the stream record format. This number is updated whenever the structure of
+ * Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as this
+ * number is subject to change at any time. In general, eventVersion will only increase as the
+ * low-level DynamoDB Streams API evolves.
+ *
Record is modified.
+ *
+ * Client applications must not assume that eventVersion will remain at a particular value, as
+ * this number is subject to change at any time. In general, eventVersion will only increase as
+ * the low-level DynamoDB Streams API evolves.
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+ public Record withEventVersion(String eventVersion) {
+ setEventVersion(eventVersion);
+ return this;
+ }
+
+ /**
+ *
+ * The AWS service from which the stream record originated. For DynamoDB Streams, this is aws:dynamodb.
+ *
aws:dynamodb.
+ */
+ public void setEventSource(String eventSource) {
+ this.eventSource = eventSource;
+ }
+
+ /**
+ *
+ * The AWS service from which the stream record originated. For DynamoDB Streams, this is aws:dynamodb.
+ *
aws:dynamodb.
+ */
+ public String getEventSource() {
+ return this.eventSource;
+ }
+
+ /**
+ *
+ * The AWS service from which the stream record originated. For DynamoDB Streams, this is aws:dynamodb.
+ *
aws:dynamodb.
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+ public Record withEventSource(String eventSource) {
+ setEventSource(eventSource);
+ return this;
+ }
+
+ /**
+ *
+ * The region in which the GetRecords request was received.
+ *
GetRecords request was received.
+ */
+ public void setAwsRegion(String awsRegion) {
+ this.awsRegion = awsRegion;
+ }
+
+ /**
+ *
+ * The region in which the GetRecords request was received.
+ *
GetRecords request was received.
+ */
+ public String getAwsRegion() {
+ return this.awsRegion;
+ }
+
+ /**
+ *
+ * The region in which the GetRecords request was received.
+ *
GetRecords request was received.
+ * @return Returns a reference to this object so that method calls can be chained together.
+ */
+ public Record withAwsRegion(String awsRegion) {
+ setAwsRegion(awsRegion);
+ return this;
+ }
+
+ /**
+ * + * The main body of the stream record, containing all of the DynamoDB-specific fields. + *
+ * + * @param dynamodb + * The main body of the stream record, containing all of the DynamoDB-specific fields. + */ + public void setDynamodb(StreamRecord dynamodb) { + this.dynamodb = dynamodb; + } + + /** + *+ * The main body of the stream record, containing all of the DynamoDB-specific fields. + *
+ * + * @return The main body of the stream record, containing all of the DynamoDB-specific fields. + */ + public StreamRecord getDynamodb() { + return this.dynamodb; + } + + /** + *+ * The main body of the stream record, containing all of the DynamoDB-specific fields. + *
+ * + * @param dynamodb + * The main body of the stream record, containing all of the DynamoDB-specific fields. + * @return Returns a reference to this object so that method calls can be chained together. + */ + public Record withDynamodb(StreamRecord dynamodb) { + setDynamodb(dynamodb); + return this; + } + + /** + *+ * Items that are deleted by the Time to Live process after expiration have the following fields: + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+ *+ * Items that are deleted by the Time to Live process after expiration have the following fields: + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+ *+ * Items that are deleted by the Time to Live process after expiration have the following fields: + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+ *+ * Records[].userIdentity.type + *
+ *+ * "Service" + *
+ *+ * Records[].userIdentity.principalId + *
+ *+ * "dynamodb.amazonaws.com" + *
+/**
+ * A description of a single data modification that was performed on an item in a DynamoDB table.
+ *
+ * @see AWS API Documentation
+ */
+public class StreamRecord implements Serializable, Cloneable {
+
+ /** The approximate date and time when the stream record was created, in UNIX epoch time format. */
+ private java.util.Date approximateCreationDateTime;
+
+ /** The primary key attribute(s) for the DynamoDB item that was modified. */
+ private java.util.Map<String, AttributeValue> keys;
+
+ /** The item in the DynamoDB table as it appeared after it was modified. */
+ private java.util.Map<String, AttributeValue> newImage;
+
+ /** The item in the DynamoDB table as it appeared before it was modified. */
+ private java.util.Map<String, AttributeValue> oldImage;
+
+ /** The sequence number of the stream record. */
+ private String sequenceNumber;
+
+ /** The size of the stream record, in bytes. */
+ private Long sizeBytes;
+
+ /**
+  * The type of data from the modified DynamoDB item that was captured in this stream record:
+  * KEYS_ONLY - only the key attributes of the modified item.
+  * NEW_IMAGE - the entire item, as it appeared after it was modified.
+  * OLD_IMAGE - the entire item, as it appeared before it was modified.
+  * NEW_AND_OLD_IMAGES - both the new and the old item images of the item.
+  */
+ private String streamViewType;
+
+ /** @param approximateCreationDateTime The approximate date and time when the stream record was created, in UNIX epoch time format. */
+ public void setApproximateCreationDateTime(java.util.Date approximateCreationDateTime) { this.approximateCreationDateTime = approximateCreationDateTime; }
+
+ /** @return The approximate date and time when the stream record was created, in UNIX epoch time format. */
+ public java.util.Date getApproximateCreationDateTime() { return this.approximateCreationDateTime; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public StreamRecord withApproximateCreationDateTime(java.util.Date approximateCreationDateTime) { setApproximateCreationDateTime(approximateCreationDateTime); return this; }
+
+ /** @return The primary key attribute(s) for the DynamoDB item that was modified. */
+ public java.util.Map<String, AttributeValue> getKeys() { return this.keys; }
+
+ /** @param keys The primary key attribute(s) for the DynamoDB item that was modified. */
+ public void setKeys(java.util.Map<String, AttributeValue> keys) { this.keys = keys; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public StreamRecord withKeys(java.util.Map<String, AttributeValue> keys) { setKeys(keys); return this; }
+
+ /** @return The item in the DynamoDB table as it appeared after it was modified. */
+ public java.util.Map<String, AttributeValue> getNewImage() { return this.newImage; }
+
+ /** @param newImage The item in the DynamoDB table as it appeared after it was modified. */
+ public void setNewImage(java.util.Map<String, AttributeValue> newImage) { this.newImage = newImage; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public StreamRecord withNewImage(java.util.Map<String, AttributeValue> newImage) { setNewImage(newImage); return this; }
+
+ /** @return The item in the DynamoDB table as it appeared before it was modified. */
+ public java.util.Map<String, AttributeValue> getOldImage() { return this.oldImage; }
+
+ /** @param oldImage The item in the DynamoDB table as it appeared before it was modified. */
+ public void setOldImage(java.util.Map<String, AttributeValue> oldImage) { this.oldImage = oldImage; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public StreamRecord withOldImage(java.util.Map<String, AttributeValue> oldImage) { setOldImage(oldImage); return this; }
+
+ /** @param sequenceNumber The sequence number of the stream record. */
+ public void setSequenceNumber(String sequenceNumber) { this.sequenceNumber = sequenceNumber; }
+
+ /** @return The sequence number of the stream record. */
+ public String getSequenceNumber() { return this.sequenceNumber; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public StreamRecord withSequenceNumber(String sequenceNumber) { setSequenceNumber(sequenceNumber); return this; }
+
+ /** @param sizeBytes The size of the stream record, in bytes. */
+ public void setSizeBytes(Long sizeBytes) { this.sizeBytes = sizeBytes; }
+
+ /** @return The size of the stream record, in bytes. */
+ public Long getSizeBytes() { return this.sizeBytes; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public StreamRecord withSizeBytes(Long sizeBytes) { setSizeBytes(sizeBytes); return this; }
+
+ /**
+  * @param streamViewType The type of data from the modified DynamoDB item that was captured in this stream record:
+  *        KEYS_ONLY - only the key attributes of the modified item.
+  *        NEW_IMAGE - the entire item, as it appeared after it was modified.
+  *        OLD_IMAGE - the entire item, as it appeared before it was modified.
+  *        NEW_AND_OLD_IMAGES - both the new and the old item images of the item.
+  * @see StreamViewType
+  */
+ public void setStreamViewType(String streamViewType) { this.streamViewType = streamViewType; }
+
+ /**
+  * @return The type of data from the modified DynamoDB item that was captured in this stream record
+  *         (KEYS_ONLY, NEW_IMAGE, OLD_IMAGE, or NEW_AND_OLD_IMAGES).
+  * @see StreamViewType
+  */
+ public String getStreamViewType() { return this.streamViewType; }
+
+ /**
+  * @param streamViewType The type of data from the modified DynamoDB item that was captured in this stream record.
+  * @return Returns a reference to this object so that method calls can be chained together.
+  * @see StreamViewType
+  */
+ public StreamRecord withStreamViewType(String streamViewType) { setStreamViewType(streamViewType); return this; }
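For orientation, here is a minimal, hypothetical handler sketch showing how these accessors are typically consumed from a DynamoDB stream event. It assumes the `DynamodbEvent` and `DynamodbEvent.DynamodbStreamRecord` wrappers from aws-lambda-java-events and the `RequestHandler`/`Context` interfaces from aws-lambda-java-core; the class and log messages are illustrative only, not part of this patch.

```java
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent.DynamodbStreamRecord;

// Illustrative sketch only: logs the view type and key set of each modified item.
public class StreamHandler implements RequestHandler<DynamodbEvent, Void> {
    @Override
    public Void handleRequest(DynamodbEvent event, Context context) {
        for (DynamodbStreamRecord record : event.getRecords()) {
            // getDynamodb() exposes the StreamRecord shown above.
            context.getLogger().log("view type: " + record.getDynamodb().getStreamViewType()
                    + ", keys: " + record.getDynamodb().getKeys().keySet());
        }
        return null;
    }
}
```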
+/**
+ * The unit of data of the Kinesis data stream, which is composed of a sequence number, a partition key, and a data
+ * blob.
+ *
+ * @see AWS API Documentation
+ */
+public class Record implements Serializable, Cloneable {
+
+ /** The unique identifier of the record within its shard. */
+ private String sequenceNumber;
+
+ /** The approximate time that the record was inserted into the stream. */
+ private java.util.Date approximateArrivalTimestamp;
+
+ /**
+  * The data blob. The data in the blob is both opaque and immutable to Kinesis Data Streams, which does not inspect,
+  * interpret, or change the data in the blob in any way. When the data blob (the payload before base64-encoding) is
+  * added to the partition key size, the total size must not exceed the maximum record size (1 MB).
+  */
+ private java.nio.ByteBuffer data;
+
+ /** Identifies which shard in the stream the data record is assigned to. */
+ private String partitionKey;
+
+ /**
+  * The encryption type used on the record. This parameter can be one of the following values:
+  * NONE: Do not encrypt the records in the stream.
+  * KMS: Use server-side encryption on the records in the stream using a customer-managed AWS KMS key.
+  */
+ private String encryptionType;
+
+ /** @param sequenceNumber The unique identifier of the record within its shard. */
+ public void setSequenceNumber(String sequenceNumber) { this.sequenceNumber = sequenceNumber; }
+
+ /** @return The unique identifier of the record within its shard. */
+ public String getSequenceNumber() { return this.sequenceNumber; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public Record withSequenceNumber(String sequenceNumber) { setSequenceNumber(sequenceNumber); return this; }
+
+ /** @param approximateArrivalTimestamp The approximate time that the record was inserted into the stream. */
+ public void setApproximateArrivalTimestamp(java.util.Date approximateArrivalTimestamp) { this.approximateArrivalTimestamp = approximateArrivalTimestamp; }
+
+ /** @return The approximate time that the record was inserted into the stream. */
+ public java.util.Date getApproximateArrivalTimestamp() { return this.approximateArrivalTimestamp; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public Record withApproximateArrivalTimestamp(java.util.Date approximateArrivalTimestamp) { setApproximateArrivalTimestamp(approximateArrivalTimestamp); return this; }
+
+ /**
+  * Sets the data blob (see the field documentation above for the size limit).
+  * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
+  * Users of the SDK should not perform Base64 encoding on this field.
+  * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
+  * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
+  * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
+  * major version of the SDK.
+  *
+  * @param data The data blob.
+  */
+ public void setData(java.nio.ByteBuffer data) { this.data = data; }
+
+ /**
+  * Returns the data blob. {@code ByteBuffer}s are stateful: calling their {@code get} methods changes their
+  * {@code position}. We recommend using {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view
+  * of the buffer with an independent {@code position}, and calling {@code get} methods on this rather than directly
+  * on the returned {@code ByteBuffer}. Doing so will ensure that anyone else using the {@code ByteBuffer} will not
+  * be affected by changes to the {@code position}.
+  *
+  * @return The data blob.
+  */
+ public java.nio.ByteBuffer getData() { return this.data; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public Record withData(java.nio.ByteBuffer data) { setData(data); return this; }
+
+ /** @param partitionKey Identifies which shard in the stream the data record is assigned to. */
+ public void setPartitionKey(String partitionKey) { this.partitionKey = partitionKey; }
+
+ /** @return Identifies which shard in the stream the data record is assigned to. */
+ public String getPartitionKey() { return this.partitionKey; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public Record withPartitionKey(String partitionKey) { setPartitionKey(partitionKey); return this; }
+
+ /**
+  * @param encryptionType The encryption type used on the record. This parameter can be one of the following values:
+  *        NONE: Do not encrypt the records in the stream.
+  *        KMS: Use server-side encryption on the records in the stream using a customer-managed AWS KMS key.
+  */
+ public void setEncryptionType(String encryptionType) { this.encryptionType = encryptionType; }
+
+ /** @return The encryption type used on the record (NONE or KMS). */
+ public String getEncryptionType() { return this.encryptionType; }
+
+ /** @return Returns a reference to this object so that method calls can be chained together. */
+ public Record withEncryptionType(String encryptionType) { setEncryptionType(encryptionType); return this; }
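The ByteBuffer caveats above matter in practice: reading the blob through a read-only duplicate leaves the shared buffer's position untouched. A minimal sketch of that pattern (class and method names are illustrative, not part of this patch):

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Illustrative sketch: copy a record's payload out of a shared ByteBuffer without
// disturbing its position, per the Javadoc recommendation above.
public final class RecordPayloads {
    private RecordPayloads() {}

    public static String asUtf8String(ByteBuffer data) {
        ByteBuffer view = data.asReadOnlyBuffer();   // independent position, read-only
        byte[] bytes = new byte[view.remaining()];
        view.get(bytes);                             // advances only the view's position
        return new String(bytes, StandardCharsets.UTF_8);
    }
}
```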
+ *
+ * 1. loads the user's handler.
+ *
+ * 2. enters the Lambda runtime loop which handles function invocations as defined in the Lambda Custom Runtime API.
+ *
+ *
+ * Once initialized, {@link AWSLambda#startRuntime} will halt only if an irrecoverable error occurs.
+ */
+public class AWSLambda {
+
+ private static URLClassLoader customerClassLoader;
+
+ private static final String TRUST_STORE_PROPERTY = "javax.net.ssl.trustStore";
+
+ private static final String JAVA_SECURITY_PROPERTIES = "java.security.properties";
+
+ private static final String NETWORKADDRESS_CACHE_NEGATIVE_TTL_ENV_VAR = "AWS_LAMBDA_JAVA_NETWORKADDRESS_CACHE_NEGATIVE_TTL";
+
+ private static final String NETWORKADDRESS_CACHE_NEGATIVE_TTL_PROPERTY = "networkaddress.cache.negative.ttl";
+
+ private static final String DEFAULT_NEGATIVE_CACHE_TTL = "1";
+
+ // System property for Lambda tracing, see aws-xray-sdk-java/LambdaSegmentContext
+ // https://github.com/aws/aws-xray-sdk-java/blob/2f467e50db61abb2ed2bd630efc21bddeabd64d9/aws-xray-recorder-sdk-core/src/main/java/com/amazonaws/xray/contexts/LambdaSegmentContext.java#L39-L40
+ private static final String LAMBDA_TRACE_HEADER_PROP = "com.amazonaws.xray.traceHeader";
+
+ private static final String INIT_TYPE_SNAP_START = "snap-start";
+
+ private static final String AWS_LAMBDA_INITIALIZATION_TYPE = System.getenv(ReservedRuntimeEnvironmentVariables.AWS_LAMBDA_INITIALIZATION_TYPE);
+
+ private static final String CONCURRENT_TRACE_ID_KEY = "AWS_LAMBDA_X_TRACE_ID";
+
+ static {
+ // Override the disabledAlgorithms setting to match configuration for openjdk8-u181.
+ // This is to keep DES ciphers around while we deploy security updates.
+ Security.setProperty(
+ "jdk.tls.disabledAlgorithms",
+ "SSLv3, RC4, MD5withRSA, DH keySize < 1024, EC keySize < 224, DES40_CBC, RC4_40, 3DES_EDE_CBC"
+ );
+ // Override the location of the trusted certificate authorities to be provided by the system.
+ // The ca-certificates package provides /etc/pki/java/cacerts which becomes the symlink destination
+ // of $java_home/lib/security/cacerts when java is installed in the chroot. Given that java is provided
+ // in /var/lang as opposed to installed in the chroot, this brings it closer.
+ if (System.getProperty(TRUST_STORE_PROPERTY) == null) {
+ final File systemCacerts = new File("/etc/pki/java/cacerts");
+ if (systemCacerts.exists() && systemCacerts.isFile()) {
+ System.setProperty(TRUST_STORE_PROPERTY, systemCacerts.getPath());
+ }
+ }
+
+ if (isNegativeCacheOverridable()) {
+ String ttlFromEnv = System.getenv(NETWORKADDRESS_CACHE_NEGATIVE_TTL_ENV_VAR);
+ String negativeCacheTtl = ttlFromEnv == null ? DEFAULT_NEGATIVE_CACHE_TTL : ttlFromEnv;
+ Security.setProperty(NETWORKADDRESS_CACHE_NEGATIVE_TTL_PROPERTY, negativeCacheTtl);
+ }
+ }
+
+ private static boolean isNegativeCacheOverridable() {
+ String securityPropertiesPath = System.getProperty(JAVA_SECURITY_PROPERTIES);
+ if (securityPropertiesPath == null) {
+ return true;
+ }
+ try (FileInputStream inputStream = new FileInputStream(securityPropertiesPath)) {
+ Properties secProps = new Properties();
+ secProps.load(inputStream);
+ return !secProps.containsKey(NETWORKADDRESS_CACHE_NEGATIVE_TTL_PROPERTY);
+ } catch (IOException e) {
+ return true;
+ }
+ }
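The net effect of the code above is that, unless the function supplies its own java.security.properties file that already defines the key, the negative DNS cache TTL resolves to the AWS_LAMBDA_JAVA_NETWORKADDRESS_CACHE_NEGATIVE_TTL environment variable or defaults to 1 second. A hypothetical snippet (not part of this patch) that a function could run to confirm the effective value:

```java
import java.security.Security;

// Illustrative check only: reads back the security property set by the runtime's
// static initializer (or by a user-provided security properties file), e.g. "1".
public class DnsCacheCheck {
    public static void main(String[] args) {
        System.out.println(Security.getProperty("networkaddress.cache.negative.ttl"));
    }
}
```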
+
+ private static LambdaRequestHandler findRequestHandler(final String handlerString, ClassLoader customerClassLoader) {
+ final HandlerInfo handlerInfo;
+ try {
+ handlerInfo = HandlerInfo.fromString(handlerString, customerClassLoader);
+ } catch (HandlerInfo.InvalidHandlerException e) {
+ UserFault userFault = UserFault.makeUserFault("Invalid handler: `" + handlerString + "'");
+ return new UserFaultHandler(userFault);
+ } catch (ClassNotFoundException e) {
+ return LambdaRequestHandler.classNotFound(e, HandlerInfo.className(handlerString));
+ } catch (NoClassDefFoundError e) {
+ return LambdaRequestHandler.initErrorHandler(e, HandlerInfo.className(handlerString));
+ } catch (Throwable t) {
+ throw UserFault.makeInitErrorUserFault(t, HandlerInfo.className(handlerString));
+ }
+
+ final LambdaRequestHandler requestHandler = EventHandlerLoader.loadEventHandler(handlerInfo);
+ // if loading the handler failed and the failure is fatal (e.g. the constructor threw an exception)
+ // we want to report this as an init error rather than deferring to the first invoke.
+ if (requestHandler instanceof UserFaultHandler) {
+ UserFault userFault = ((UserFaultHandler) requestHandler).fault;
+ if (userFault.fatal) {
+ throw userFault;
+ }
+ }
+ return requestHandler;
+ }
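findRequestHandler resolves the handler string configured on the function (for example, "example.EchoHandler::handleRequest", or just the class name) to a concrete implementation. A hypothetical handler that such a string could point to, using the aws-lambda-java-core interfaces; the package and class names here are illustrative:

```java
package example;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;

// Illustrative only: with the function handler set to "example.EchoHandler::handleRequest"
// (or simply "example.EchoHandler"), HandlerInfo.fromString would resolve to this class.
public class EchoHandler implements RequestHandler<String, String> {
    @Override
    public String handleRequest(String input, Context context) {
        return input;
    }
}
```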
+
+ private static LambdaRequestHandler getLambdaRequestHandlerObject(String handler, LambdaContextLogger lambdaLogger, LambdaRuntimeApiClient runtimeClient) throws ClassNotFoundException, IOException {
+ UnsafeUtil.disableIllegalAccessWarning();
+
+ System.setOut(new PrintStream(new LambdaOutputStream(System.out), false, "UTF-8"));
+ System.setErr(new PrintStream(new LambdaOutputStream(System.err), false, "UTF-8"));
+ setupRuntimeLogger(lambdaLogger);
+
+ String taskRoot = System.getProperty("user.dir");
+ String libRoot = "/opt/java";
+ // Make system classloader the customer classloader's parent to ensure any aws-lambda-java-core classes
+ // are loaded from the system classloader.
+ customerClassLoader = new CustomerClassLoader(taskRoot, libRoot, ClassLoader.getSystemClassLoader());
+ Thread.currentThread().setContextClassLoader(customerClassLoader);
+
+ // Load the user's handler
+ LambdaRequestHandler requestHandler = null;
+ try {
+ requestHandler = findRequestHandler(handler, customerClassLoader);
+ } catch (UserFault userFault) {
+ lambdaLogger.log(userFault.reportableError(), lambdaLogger.getLogFormat() == LogFormat.JSON ? LogLevel.ERROR : LogLevel.UNDEFINED);
+ LambdaError error = new LambdaError(
+ LambdaErrorConverter.fromUserFault(userFault),
+ RapidErrorType.BadFunctionCode);
+ runtimeClient.reportInitError(error);
+ System.exit(1);
+ }
+
+ if (INIT_TYPE_SNAP_START.equals(AWS_LAMBDA_INITIALIZATION_TYPE)) {
+ onInitComplete(lambdaLogger, runtimeClient);
+ }
+
+ return requestHandler;
+ }
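Because the customer class loader is parented to the system class loader here, aws-lambda-java-core types such as Context resolve from the runtime's own classpath, while function code resolves from the task root and /opt/java. A small, hypothetical probe (illustrative only) that makes the split visible from inside a handler:

```java
import com.amazonaws.services.lambda.runtime.Context;

// Illustrative only: prints which loader provided the core interface vs. the function's own class.
public class LoaderProbe {
    public static void describe(Context context) {
        context.getLogger().log("Context loaded by: " + Context.class.getClassLoader());
        context.getLogger().log("LoaderProbe loaded by: " + LoaderProbe.class.getClassLoader());
        context.getLogger().log("Context class loader of thread: " + Thread.currentThread().getContextClassLoader());
    }
}
```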
+
+ private static void setupRuntimeLogger(LambdaLogger lambdaLogger)
+ throws ClassNotFoundException {
+ ReflectUtil.setStaticField(
+ Class.forName("com.amazonaws.services.lambda.runtime.LambdaRuntime"),
+ "logger",
+ true,
+ lambdaLogger
+ );
+ }
+
+ /**
+ * convert an integer into a FileDescriptor object using reflection to access private members.
+ */
+ private static FileDescriptor intToFd(int fd) throws RuntimeException {
+ try {
+ Class
+ * It is used to generate a class list and Application CDS archive that includes all the possible classes that could be
+ * loaded by the runtime. This simplifies the process of generating the Application CDS archive.
+ */
+public class ClasspathLoader {
+
+ private static final Set
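The excerpt is cut off above, but the idea behind ClasspathLoader is to force-load every class reachable on the classpath so that a class-list dump (and the resulting Application CDS archive) captures them. A rough, hypothetical sketch of that approach — not the actual implementation, and all names here are illustrative:

```java
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

// Rough sketch only: walk an exploded classpath directory and load (without initializing)
// every .class file found, so the classes appear in the JVM's loaded-class list.
public class LoadAllClasses {
    public static void main(String[] args) throws IOException {
        Path root = Paths.get(args[0]); // e.g. an exploded jar or classes directory
        try (Stream<Path> files = Files.walk(root)) {
            files.filter(p -> p.toString().endsWith(".class"))
                 .map(p -> root.relativize(p).toString()
                               .replace(FileSystems.getDefault().getSeparator(), ".")
                               .replaceAll("\\.class$", ""))
                 .forEach(name -> {
                     try {
                         // Load without initializing, so static initializers do not run.
                         Class.forName(name, false, LoadAllClasses.class.getClassLoader());
                     } catch (Throwable ignored) {
                         // Some classes may fail to load (missing optional deps); skip them.
                     }
                 });
        }
    }
}
```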
+ * It is used to generate a class list and Application CDS archive that includes all the possible classes that could be
+ * loaded by the runtime. This simplifies the process of generating the Application CDS archive.
+ */
+public class ClasspathLoader {
+
+ private static final Set