diff --git a/clients/client-acm/src/endpoints.ts b/clients/client-acm/src/endpoints.ts index 308a2c663f4e..355e65ece77b 100644 --- a/clients/client-acm/src/endpoints.ts +++ b/clients/client-acm/src/endpoints.ts @@ -172,7 +172,7 @@ const partitionHash: PartitionHash = { tags: [], }, { - hostname: "acm-fips.{region}.amazonaws.com", + hostname: "acm.{region}.amazonaws.com", tags: ["fips"], }, { diff --git a/clients/client-apprunner/src/models/models_0.ts b/clients/client-apprunner/src/models/models_0.ts index e898af2d5607..e7b280945759 100644 --- a/clients/client-apprunner/src/models/models_0.ts +++ b/clients/client-apprunner/src/models/models_0.ts @@ -696,7 +696,10 @@ export namespace AuthenticationConfiguration { } export enum Runtime { + CORRETTO_11 = "CORRETTO_11", + CORRETTO_8 = "CORRETTO_8", NODEJS_12 = "NODEJS_12", + NODEJS_14 = "NODEJS_14", PYTHON_3 = "PYTHON_3", } diff --git a/clients/client-customer-profiles/src/CustomerProfiles.ts b/clients/client-customer-profiles/src/CustomerProfiles.ts index c56d1b9fce5d..8064b021afa3 100644 --- a/clients/client-customer-profiles/src/CustomerProfiles.ts +++ b/clients/client-customer-profiles/src/CustomerProfiles.ts @@ -10,6 +10,11 @@ import { CreateDomainCommandInput, CreateDomainCommandOutput, } from "./commands/CreateDomainCommand"; +import { + CreateIntegrationWorkflowCommand, + CreateIntegrationWorkflowCommandInput, + CreateIntegrationWorkflowCommandOutput, +} from "./commands/CreateIntegrationWorkflowCommand"; import { CreateProfileCommand, CreateProfileCommandInput, @@ -45,6 +50,11 @@ import { DeleteProfileObjectTypeCommandInput, DeleteProfileObjectTypeCommandOutput, } from "./commands/DeleteProfileObjectTypeCommand"; +import { + DeleteWorkflowCommand, + DeleteWorkflowCommandInput, + DeleteWorkflowCommandOutput, +} from "./commands/DeleteWorkflowCommand"; import { GetAutoMergingPreviewCommand, GetAutoMergingPreviewCommandInput, @@ -72,6 +82,12 @@ import { GetProfileObjectTypeTemplateCommandInput, GetProfileObjectTypeTemplateCommandOutput, } from "./commands/GetProfileObjectTypeTemplateCommand"; +import { GetWorkflowCommand, GetWorkflowCommandInput, GetWorkflowCommandOutput } from "./commands/GetWorkflowCommand"; +import { + GetWorkflowStepsCommand, + GetWorkflowStepsCommandInput, + GetWorkflowStepsCommandOutput, +} from "./commands/GetWorkflowStepsCommand"; import { ListAccountIntegrationsCommand, ListAccountIntegrationsCommandInput, @@ -108,6 +124,11 @@ import { ListTagsForResourceCommandInput, ListTagsForResourceCommandOutput, } from "./commands/ListTagsForResourceCommand"; +import { + ListWorkflowsCommand, + ListWorkflowsCommandInput, + ListWorkflowsCommandOutput, +} from "./commands/ListWorkflowsCommand"; import { MergeProfilesCommand, MergeProfilesCommandInput, @@ -237,6 +258,40 @@ export class CustomerProfiles extends CustomerProfilesClient { } } + /** + *

+ * Creates an integration workflow. An integration workflow is an async process that ingests historical data and sets up an integration for ongoing updates. The supported Amazon AppFlow sources are Salesforce, ServiceNow, and Marketo. + *

+ */ + public createIntegrationWorkflow( + args: CreateIntegrationWorkflowCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createIntegrationWorkflow( + args: CreateIntegrationWorkflowCommandInput, + cb: (err: any, data?: CreateIntegrationWorkflowCommandOutput) => void + ): void; + public createIntegrationWorkflow( + args: CreateIntegrationWorkflowCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateIntegrationWorkflowCommandOutput) => void + ): void; + public createIntegrationWorkflow( + args: CreateIntegrationWorkflowCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateIntegrationWorkflowCommandOutput) => void), + cb?: (err: any, data?: CreateIntegrationWorkflowCommandOutput) => void + ): Promise | void { + const command = new CreateIntegrationWorkflowCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *
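The new aggregated-client method above can be exercised in promise style. A minimal sketch only: the request field names (`DomainName`, `WorkflowType`, `ObjectTypeName`, `RoleArn`, `IntegrationConfig`) are assumptions, since the request shape is not shown in this hunk.

```ts
import { CustomerProfiles, FlowDefinition } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfiles({ region: "us-east-1" });

async function startIngestion(flowDefinition: FlowDefinition) {
  // Field names below are assumed; see CreateIntegrationWorkflowRequest in models_0 for the real shape.
  const output = await client.createIntegrationWorkflow({
    DomainName: "my-domain",
    WorkflowType: "APPFLOW_INTEGRATION",
    ObjectTypeName: "CTR",
    RoleArn: "arn:aws:iam::111122223333:role/CustomerProfilesWorkflowRole",
    IntegrationConfig: { AppflowIntegration: { FlowDefinition: flowDefinition } },
  });
  console.log(output); // expected to carry an identifier for the newly started workflow
}
```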

Creates a standard profile.

*

A standard profile represents the following attributes for a customer profile in a @@ -464,6 +519,38 @@ export class CustomerProfiles extends CustomerProfilesClient { } } + /** + *

Deletes the specified workflow and all its corresponding resources. This is an async process.

+ */ + public deleteWorkflow( + args: DeleteWorkflowCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteWorkflow( + args: DeleteWorkflowCommandInput, + cb: (err: any, data?: DeleteWorkflowCommandOutput) => void + ): void; + public deleteWorkflow( + args: DeleteWorkflowCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteWorkflowCommandOutput) => void + ): void; + public deleteWorkflow( + args: DeleteWorkflowCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteWorkflowCommandOutput) => void), + cb?: (err: any, data?: DeleteWorkflowCommandOutput) => void + ): Promise | void { + const command = new DeleteWorkflowCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *
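Because `deleteWorkflow` carries the same overloads as the other aggregated-client methods, it can be driven with either a callback or a promise. A hedged sketch; `DomainName` and `WorkflowId` are assumed request fields.

```ts
import { CustomerProfiles } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfiles({ region: "us-east-1" });

// Callback overload, as declared above. DomainName/WorkflowId are assumed field names.
client.deleteWorkflow({ DomainName: "my-domain", WorkflowId: "wf-1234" }, (err, data) => {
  if (err) {
    console.error("DeleteWorkflow failed", err);
    return;
  }
  // Deletion itself is asynchronous on the service side; this only confirms the request was accepted.
  console.log("Deletion requested", data);
});

// The promise overload is equivalent:
// await client.deleteWorkflow({ DomainName: "my-domain", WorkflowId: "wf-1234" });
```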

Tests the auto-merging settings of your Identity Resolution Job without merging your data. It randomly * selects a sample of matching groups from the existing matching results, and applies the @@ -643,9 +730,6 @@ export class CustomerProfiles extends CustomerProfilesClient { *

  • *

    FullName

    *
  • - *
  • - *

    BusinessName

    - *
  • * *

    For example, two or more profiles—with spelling mistakes such as John Doe and Jhn Doe, or different casing * email addresses such as JOHN_DOE@ANYCOMPANY.COM and @@ -743,6 +827,64 @@ export class CustomerProfiles extends CustomerProfilesClient { } } + /** + *

Gets details of the specified workflow.

    + */ + public getWorkflow(args: GetWorkflowCommandInput, options?: __HttpHandlerOptions): Promise; + public getWorkflow(args: GetWorkflowCommandInput, cb: (err: any, data?: GetWorkflowCommandOutput) => void): void; + public getWorkflow( + args: GetWorkflowCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetWorkflowCommandOutput) => void + ): void; + public getWorkflow( + args: GetWorkflowCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetWorkflowCommandOutput) => void), + cb?: (err: any, data?: GetWorkflowCommandOutput) => void + ): Promise | void { + const command = new GetWorkflowCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *
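A short sketch of checking on a workflow through the method above; the request fields and the response fields being logged are assumptions, not shown in this hunk.

```ts
import { CustomerProfiles } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfiles({ region: "us-east-1" });

async function describeWorkflow() {
  // DomainName/WorkflowId and the logged response fields are assumed names.
  const workflow = await client.getWorkflow({ DomainName: "my-domain", WorkflowId: "wf-1234" });
  console.log(workflow.Status, workflow.Metrics);
}
```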

Gets a granular list of the steps in a workflow.

    + */ + public getWorkflowSteps( + args: GetWorkflowStepsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getWorkflowSteps( + args: GetWorkflowStepsCommandInput, + cb: (err: any, data?: GetWorkflowStepsCommandOutput) => void + ): void; + public getWorkflowSteps( + args: GetWorkflowStepsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetWorkflowStepsCommandOutput) => void + ): void; + public getWorkflowSteps( + args: GetWorkflowStepsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetWorkflowStepsCommandOutput) => void), + cb?: (err: any, data?: GetWorkflowStepsCommandOutput) => void + ): Promise | void { + const command = new GetWorkflowStepsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *
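Step listings are likely to span multiple pages, so a token loop is the natural way to drain them. A sketch only; `MaxResults`, `NextToken`, and `Items` are assumed field names.

```ts
import { CustomerProfiles, GetWorkflowStepsCommandOutput } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfiles({ region: "us-east-1" });

async function printAllSteps() {
  let nextToken: string | undefined;
  do {
    // MaxResults, NextToken, and Items are assumed names for this sketch.
    const page: GetWorkflowStepsCommandOutput = await client.getWorkflowSteps({
      DomainName: "my-domain",
      WorkflowId: "wf-1234",
      MaxResults: 25,
      NextToken: nextToken,
    });
    for (const step of page.Items ?? []) {
      console.log(step);
    }
    nextToken = page.NextToken;
  } while (nextToken);
}
```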

    Lists all of the integrations associated to a specific URI in the AWS account.

    */ @@ -995,6 +1137,38 @@ export class CustomerProfiles extends CustomerProfilesClient { } } + /** + *

Lists all workflows.

    + */ + public listWorkflows( + args: ListWorkflowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listWorkflows( + args: ListWorkflowsCommandInput, + cb: (err: any, data?: ListWorkflowsCommandOutput) => void + ): void; + public listWorkflows( + args: ListWorkflowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListWorkflowsCommandOutput) => void + ): void; + public listWorkflows( + args: ListWorkflowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListWorkflowsCommandOutput) => void), + cb?: (err: any, data?: ListWorkflowsCommandOutput) => void + ): Promise | void { + const command = new ListWorkflowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *
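And a sketch of listing workflows through the method above, filtered by one of the `Status` values this diff defines; `DomainName`, `WorkflowType`, `Status`, and `Items` are assumed request/response field names.

```ts
import { CustomerProfiles } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfiles({ region: "us-east-1" });

async function showRunningWorkflows() {
  // DomainName/WorkflowType/Status/Items are assumed names for this sketch.
  const result = await client.listWorkflows({
    DomainName: "my-domain",
    WorkflowType: "APPFLOW_INTEGRATION",
    Status: "IN_PROGRESS", // one of the Status enum values added in models_0
  });
  for (const item of result.Items ?? []) {
    console.log(item);
  }
}
```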

    Runs an AWS Lambda job that does the following:

    *
      diff --git a/clients/client-customer-profiles/src/CustomerProfilesClient.ts b/clients/client-customer-profiles/src/CustomerProfilesClient.ts index 8c064751e228..bdfec86d96d9 100644 --- a/clients/client-customer-profiles/src/CustomerProfilesClient.ts +++ b/clients/client-customer-profiles/src/CustomerProfilesClient.ts @@ -52,6 +52,10 @@ import { import { AddProfileKeyCommandInput, AddProfileKeyCommandOutput } from "./commands/AddProfileKeyCommand"; import { CreateDomainCommandInput, CreateDomainCommandOutput } from "./commands/CreateDomainCommand"; +import { + CreateIntegrationWorkflowCommandInput, + CreateIntegrationWorkflowCommandOutput, +} from "./commands/CreateIntegrationWorkflowCommand"; import { CreateProfileCommandInput, CreateProfileCommandOutput } from "./commands/CreateProfileCommand"; import { DeleteDomainCommandInput, DeleteDomainCommandOutput } from "./commands/DeleteDomainCommand"; import { DeleteIntegrationCommandInput, DeleteIntegrationCommandOutput } from "./commands/DeleteIntegrationCommand"; @@ -65,6 +69,7 @@ import { DeleteProfileObjectTypeCommandInput, DeleteProfileObjectTypeCommandOutput, } from "./commands/DeleteProfileObjectTypeCommand"; +import { DeleteWorkflowCommandInput, DeleteWorkflowCommandOutput } from "./commands/DeleteWorkflowCommand"; import { GetAutoMergingPreviewCommandInput, GetAutoMergingPreviewCommandOutput, @@ -84,6 +89,8 @@ import { GetProfileObjectTypeTemplateCommandInput, GetProfileObjectTypeTemplateCommandOutput, } from "./commands/GetProfileObjectTypeTemplateCommand"; +import { GetWorkflowCommandInput, GetWorkflowCommandOutput } from "./commands/GetWorkflowCommand"; +import { GetWorkflowStepsCommandInput, GetWorkflowStepsCommandOutput } from "./commands/GetWorkflowStepsCommand"; import { ListAccountIntegrationsCommandInput, ListAccountIntegrationsCommandOutput, @@ -107,6 +114,7 @@ import { ListTagsForResourceCommandInput, ListTagsForResourceCommandOutput, } from "./commands/ListTagsForResourceCommand"; +import { ListWorkflowsCommandInput, ListWorkflowsCommandOutput } from "./commands/ListWorkflowsCommand"; import { MergeProfilesCommandInput, MergeProfilesCommandOutput } from "./commands/MergeProfilesCommand"; import { PutIntegrationCommandInput, PutIntegrationCommandOutput } from "./commands/PutIntegrationCommand"; import { PutProfileObjectCommandInput, PutProfileObjectCommandOutput } from "./commands/PutProfileObjectCommand"; @@ -124,6 +132,7 @@ import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; export type ServiceInputTypes = | AddProfileKeyCommandInput | CreateDomainCommandInput + | CreateIntegrationWorkflowCommandInput | CreateProfileCommandInput | DeleteDomainCommandInput | DeleteIntegrationCommandInput @@ -131,6 +140,7 @@ export type ServiceInputTypes = | DeleteProfileKeyCommandInput | DeleteProfileObjectCommandInput | DeleteProfileObjectTypeCommandInput + | DeleteWorkflowCommandInput | GetAutoMergingPreviewCommandInput | GetDomainCommandInput | GetIdentityResolutionJobCommandInput @@ -138,6 +148,8 @@ export type ServiceInputTypes = | GetMatchesCommandInput | GetProfileObjectTypeCommandInput | GetProfileObjectTypeTemplateCommandInput + | GetWorkflowCommandInput + | GetWorkflowStepsCommandInput | ListAccountIntegrationsCommandInput | ListDomainsCommandInput | ListIdentityResolutionJobsCommandInput @@ -146,6 +158,7 @@ export type ServiceInputTypes = | ListProfileObjectTypesCommandInput | ListProfileObjectsCommandInput | ListTagsForResourceCommandInput + | ListWorkflowsCommandInput | MergeProfilesCommandInput | 
PutIntegrationCommandInput | PutProfileObjectCommandInput @@ -159,6 +172,7 @@ export type ServiceInputTypes = export type ServiceOutputTypes = | AddProfileKeyCommandOutput | CreateDomainCommandOutput + | CreateIntegrationWorkflowCommandOutput | CreateProfileCommandOutput | DeleteDomainCommandOutput | DeleteIntegrationCommandOutput @@ -166,6 +180,7 @@ export type ServiceOutputTypes = | DeleteProfileKeyCommandOutput | DeleteProfileObjectCommandOutput | DeleteProfileObjectTypeCommandOutput + | DeleteWorkflowCommandOutput | GetAutoMergingPreviewCommandOutput | GetDomainCommandOutput | GetIdentityResolutionJobCommandOutput @@ -173,6 +188,8 @@ export type ServiceOutputTypes = | GetMatchesCommandOutput | GetProfileObjectTypeCommandOutput | GetProfileObjectTypeTemplateCommandOutput + | GetWorkflowCommandOutput + | GetWorkflowStepsCommandOutput | ListAccountIntegrationsCommandOutput | ListDomainsCommandOutput | ListIdentityResolutionJobsCommandOutput @@ -181,6 +198,7 @@ export type ServiceOutputTypes = | ListProfileObjectTypesCommandOutput | ListProfileObjectsCommandOutput | ListTagsForResourceCommandOutput + | ListWorkflowsCommandOutput | MergeProfilesCommandOutput | PutIntegrationCommandOutput | PutProfileObjectCommandOutput diff --git a/clients/client-customer-profiles/src/commands/CreateIntegrationWorkflowCommand.ts b/clients/client-customer-profiles/src/commands/CreateIntegrationWorkflowCommand.ts new file mode 100644 index 000000000000..4b32a3ad6ffd --- /dev/null +++ b/clients/client-customer-profiles/src/commands/CreateIntegrationWorkflowCommand.ts @@ -0,0 +1,100 @@ +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + MiddlewareStack, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +import { CustomerProfilesClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../CustomerProfilesClient"; +import { CreateIntegrationWorkflowRequest, CreateIntegrationWorkflowResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateIntegrationWorkflowCommand, + serializeAws_restJson1CreateIntegrationWorkflowCommand, +} from "../protocols/Aws_restJson1"; + +export interface CreateIntegrationWorkflowCommandInput extends CreateIntegrationWorkflowRequest {} +export interface CreateIntegrationWorkflowCommandOutput extends CreateIntegrationWorkflowResponse, __MetadataBearer {} + +/** + *

+ * Creates an integration workflow. An integration workflow is an async process that ingests historical data and sets up an integration for ongoing updates. The supported Amazon AppFlow sources are Salesforce, ServiceNow, and Marketo. + *

      + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { CustomerProfilesClient, CreateIntegrationWorkflowCommand } from "@aws-sdk/client-customer-profiles"; // ES Modules import + * // const { CustomerProfilesClient, CreateIntegrationWorkflowCommand } = require("@aws-sdk/client-customer-profiles"); // CommonJS import + * const client = new CustomerProfilesClient(config); + * const command = new CreateIntegrationWorkflowCommand(input); + * const response = await client.send(command); + * ``` + * + * @see {@link CreateIntegrationWorkflowCommandInput} for command's `input` shape. + * @see {@link CreateIntegrationWorkflowCommandOutput} for command's `response` shape. + * @see {@link CustomerProfilesClientResolvedConfig | config} for CustomerProfilesClient's `config` shape. + * + */ +export class CreateIntegrationWorkflowCommand extends $Command< + CreateIntegrationWorkflowCommandInput, + CreateIntegrationWorkflowCommandOutput, + CustomerProfilesClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateIntegrationWorkflowCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: CustomerProfilesClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "CustomerProfilesClient"; + const commandName = "CreateIntegrationWorkflowCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateIntegrationWorkflowRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateIntegrationWorkflowResponse.filterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateIntegrationWorkflowCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateIntegrationWorkflowCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1CreateIntegrationWorkflowCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-customer-profiles/src/commands/DeleteWorkflowCommand.ts b/clients/client-customer-profiles/src/commands/DeleteWorkflowCommand.ts new file mode 100644 index 000000000000..f46a1707c4fc --- /dev/null +++ b/clients/client-customer-profiles/src/commands/DeleteWorkflowCommand.ts @@ -0,0 +1,95 @@ +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + MiddlewareStack, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +import { 
CustomerProfilesClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../CustomerProfilesClient"; +import { DeleteWorkflowRequest, DeleteWorkflowResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteWorkflowCommand, + serializeAws_restJson1DeleteWorkflowCommand, +} from "../protocols/Aws_restJson1"; + +export interface DeleteWorkflowCommandInput extends DeleteWorkflowRequest {} +export interface DeleteWorkflowCommandOutput extends DeleteWorkflowResponse, __MetadataBearer {} + +/** + *

      Deletes the specified workflow and all its corresponding resources. This is an async process.

      + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { CustomerProfilesClient, DeleteWorkflowCommand } from "@aws-sdk/client-customer-profiles"; // ES Modules import + * // const { CustomerProfilesClient, DeleteWorkflowCommand } = require("@aws-sdk/client-customer-profiles"); // CommonJS import + * const client = new CustomerProfilesClient(config); + * const command = new DeleteWorkflowCommand(input); + * const response = await client.send(command); + * ``` + * + * @see {@link DeleteWorkflowCommandInput} for command's `input` shape. + * @see {@link DeleteWorkflowCommandOutput} for command's `response` shape. + * @see {@link CustomerProfilesClientResolvedConfig | config} for CustomerProfilesClient's `config` shape. + * + */ +export class DeleteWorkflowCommand extends $Command< + DeleteWorkflowCommandInput, + DeleteWorkflowCommandOutput, + CustomerProfilesClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteWorkflowCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: CustomerProfilesClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "CustomerProfilesClient"; + const commandName = "DeleteWorkflowCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteWorkflowRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteWorkflowResponse.filterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteWorkflowCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteWorkflowCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteWorkflowCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-customer-profiles/src/commands/GetMatchesCommand.ts b/clients/client-customer-profiles/src/commands/GetMatchesCommand.ts index f3722b932cc5..cea9a3773220 100644 --- a/clients/client-customer-profiles/src/commands/GetMatchesCommand.ts +++ b/clients/client-customer-profiles/src/commands/GetMatchesCommand.ts @@ -63,9 +63,6 @@ export interface GetMatchesCommandOutput extends GetMatchesResponse, __MetadataB *
    1. *

      FullName

      *
    2. - *
    3. - *

      BusinessName

      - *
    4. * *

      For example, two or more profiles—with spelling mistakes such as John Doe and Jhn Doe, or different casing * email addresses such as JOHN_DOE@ANYCOMPANY.COM and diff --git a/clients/client-customer-profiles/src/commands/GetWorkflowCommand.ts b/clients/client-customer-profiles/src/commands/GetWorkflowCommand.ts new file mode 100644 index 000000000000..9b2c1d54a1fa --- /dev/null +++ b/clients/client-customer-profiles/src/commands/GetWorkflowCommand.ts @@ -0,0 +1,95 @@ +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + MiddlewareStack, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +import { CustomerProfilesClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../CustomerProfilesClient"; +import { GetWorkflowRequest, GetWorkflowResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetWorkflowCommand, + serializeAws_restJson1GetWorkflowCommand, +} from "../protocols/Aws_restJson1"; + +export interface GetWorkflowCommandInput extends GetWorkflowRequest {} +export interface GetWorkflowCommandOutput extends GetWorkflowResponse, __MetadataBearer {} + +/** + *

Gets details of the specified workflow.

      + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { CustomerProfilesClient, GetWorkflowCommand } from "@aws-sdk/client-customer-profiles"; // ES Modules import + * // const { CustomerProfilesClient, GetWorkflowCommand } = require("@aws-sdk/client-customer-profiles"); // CommonJS import + * const client = new CustomerProfilesClient(config); + * const command = new GetWorkflowCommand(input); + * const response = await client.send(command); + * ``` + * + * @see {@link GetWorkflowCommandInput} for command's `input` shape. + * @see {@link GetWorkflowCommandOutput} for command's `response` shape. + * @see {@link CustomerProfilesClientResolvedConfig | config} for CustomerProfilesClient's `config` shape. + * + */ +export class GetWorkflowCommand extends $Command< + GetWorkflowCommandInput, + GetWorkflowCommandOutput, + CustomerProfilesClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetWorkflowCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: CustomerProfilesClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "CustomerProfilesClient"; + const commandName = "GetWorkflowCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetWorkflowRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetWorkflowResponse.filterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetWorkflowCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetWorkflowCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetWorkflowCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-customer-profiles/src/commands/GetWorkflowStepsCommand.ts b/clients/client-customer-profiles/src/commands/GetWorkflowStepsCommand.ts new file mode 100644 index 000000000000..9e400b34b240 --- /dev/null +++ b/clients/client-customer-profiles/src/commands/GetWorkflowStepsCommand.ts @@ -0,0 +1,95 @@ +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + MiddlewareStack, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +import { CustomerProfilesClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../CustomerProfilesClient"; +import { GetWorkflowStepsRequest, GetWorkflowStepsResponse } from "../models/models_0"; +import { + 
deserializeAws_restJson1GetWorkflowStepsCommand, + serializeAws_restJson1GetWorkflowStepsCommand, +} from "../protocols/Aws_restJson1"; + +export interface GetWorkflowStepsCommandInput extends GetWorkflowStepsRequest {} +export interface GetWorkflowStepsCommandOutput extends GetWorkflowStepsResponse, __MetadataBearer {} + +/** + *

Gets a granular list of the steps in a workflow.

      + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { CustomerProfilesClient, GetWorkflowStepsCommand } from "@aws-sdk/client-customer-profiles"; // ES Modules import + * // const { CustomerProfilesClient, GetWorkflowStepsCommand } = require("@aws-sdk/client-customer-profiles"); // CommonJS import + * const client = new CustomerProfilesClient(config); + * const command = new GetWorkflowStepsCommand(input); + * const response = await client.send(command); + * ``` + * + * @see {@link GetWorkflowStepsCommandInput} for command's `input` shape. + * @see {@link GetWorkflowStepsCommandOutput} for command's `response` shape. + * @see {@link CustomerProfilesClientResolvedConfig | config} for CustomerProfilesClient's `config` shape. + * + */ +export class GetWorkflowStepsCommand extends $Command< + GetWorkflowStepsCommandInput, + GetWorkflowStepsCommandOutput, + CustomerProfilesClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetWorkflowStepsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: CustomerProfilesClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "CustomerProfilesClient"; + const commandName = "GetWorkflowStepsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetWorkflowStepsRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetWorkflowStepsResponse.filterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetWorkflowStepsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetWorkflowStepsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetWorkflowStepsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-customer-profiles/src/commands/ListWorkflowsCommand.ts b/clients/client-customer-profiles/src/commands/ListWorkflowsCommand.ts new file mode 100644 index 000000000000..437e4db3fea4 --- /dev/null +++ b/clients/client-customer-profiles/src/commands/ListWorkflowsCommand.ts @@ -0,0 +1,95 @@ +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + MiddlewareStack, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +import { CustomerProfilesClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../CustomerProfilesClient"; +import { ListWorkflowsRequest, ListWorkflowsResponse } from 
"../models/models_0"; +import { + deserializeAws_restJson1ListWorkflowsCommand, + serializeAws_restJson1ListWorkflowsCommand, +} from "../protocols/Aws_restJson1"; + +export interface ListWorkflowsCommandInput extends ListWorkflowsRequest {} +export interface ListWorkflowsCommandOutput extends ListWorkflowsResponse, __MetadataBearer {} + +/** + *

Lists all workflows.

      + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { CustomerProfilesClient, ListWorkflowsCommand } from "@aws-sdk/client-customer-profiles"; // ES Modules import + * // const { CustomerProfilesClient, ListWorkflowsCommand } = require("@aws-sdk/client-customer-profiles"); // CommonJS import + * const client = new CustomerProfilesClient(config); + * const command = new ListWorkflowsCommand(input); + * const response = await client.send(command); + * ``` + * + * @see {@link ListWorkflowsCommandInput} for command's `input` shape. + * @see {@link ListWorkflowsCommandOutput} for command's `response` shape. + * @see {@link CustomerProfilesClientResolvedConfig | config} for CustomerProfilesClient's `config` shape. + * + */ +export class ListWorkflowsCommand extends $Command< + ListWorkflowsCommandInput, + ListWorkflowsCommandOutput, + CustomerProfilesClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListWorkflowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: CustomerProfilesClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "CustomerProfilesClient"; + const commandName = "ListWorkflowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListWorkflowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListWorkflowsResponse.filterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListWorkflowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListWorkflowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListWorkflowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-customer-profiles/src/commands/index.ts b/clients/client-customer-profiles/src/commands/index.ts index 076094db271a..4ac43cdcff22 100644 --- a/clients/client-customer-profiles/src/commands/index.ts +++ b/clients/client-customer-profiles/src/commands/index.ts @@ -1,5 +1,6 @@ export * from "./AddProfileKeyCommand"; export * from "./CreateDomainCommand"; +export * from "./CreateIntegrationWorkflowCommand"; export * from "./CreateProfileCommand"; export * from "./DeleteDomainCommand"; export * from "./DeleteIntegrationCommand"; @@ -7,6 +8,7 @@ export * from "./DeleteProfileCommand"; export * from "./DeleteProfileKeyCommand"; export * from "./DeleteProfileObjectCommand"; export * from "./DeleteProfileObjectTypeCommand"; +export * from "./DeleteWorkflowCommand"; export * from "./GetAutoMergingPreviewCommand"; export * from "./GetDomainCommand"; export * from "./GetIdentityResolutionJobCommand"; @@ -14,6 +16,8 @@ export * from "./GetIntegrationCommand"; export * from 
"./GetMatchesCommand"; export * from "./GetProfileObjectTypeCommand"; export * from "./GetProfileObjectTypeTemplateCommand"; +export * from "./GetWorkflowCommand"; +export * from "./GetWorkflowStepsCommand"; export * from "./ListAccountIntegrationsCommand"; export * from "./ListDomainsCommand"; export * from "./ListIdentityResolutionJobsCommand"; @@ -22,6 +26,7 @@ export * from "./ListProfileObjectTypeTemplatesCommand"; export * from "./ListProfileObjectTypesCommand"; export * from "./ListProfileObjectsCommand"; export * from "./ListTagsForResourceCommand"; +export * from "./ListWorkflowsCommand"; export * from "./MergeProfilesCommand"; export * from "./PutIntegrationCommand"; export * from "./PutProfileObjectCommand"; diff --git a/clients/client-customer-profiles/src/models/models_0.ts b/clients/client-customer-profiles/src/models/models_0.ts index f7f5c4435403..ba6c4faeb4fc 100644 --- a/clients/client-customer-profiles/src/models/models_0.ts +++ b/clients/client-customer-profiles/src/models/models_0.ts @@ -225,97 +225,245 @@ export namespace Address { }); } -export enum ConflictResolvingModel { - RECENCY = "RECENCY", - SOURCE = "SOURCE", +/** + *

Batch defines the boundaries for ingestion for each step in an APPFLOW_INTEGRATION workflow. The APPFLOW_INTEGRATION workflow splits ingestion based on these boundaries.

      + */ +export interface Batch { + /** + *

      Start time of batch to split ingestion.

      + */ + StartTime: Date | undefined; + + /** + *

      End time of batch to split ingestion.

      + */ + EndTime: Date | undefined; +} + +export namespace Batch { + /** + * @internal + */ + export const filterSensitiveLog = (obj: Batch): any => ({ + ...obj, + }); +} + +export enum SourceConnectorType { + MARKETO = "Marketo", + S3 = "S3", + SALESFORCE = "Salesforce", + SERVICENOW = "Servicenow", + ZENDESK = "Zendesk", } /** - *

      How the auto-merging process should resolve conflicts between different profiles.

      + *

      Specifies the configuration used when importing incremental records from the + * source.

      */ -export interface ConflictResolution { +export interface IncrementalPullConfig { /** - *

      How the auto-merging process should resolve conflicts between different profiles.

      - *
        - *
      • - *

        - * RECENCY: Uses the data that was most recently updated.

        - *
      • - *
      • - *

        - * SOURCE: Uses the data from a specific source. For example, if a - * company has been aquired or two departments have merged, data from the specified - * source is used. If two duplicate profiles are from the same source, then - * RECENCY is used again.

        - *
      • - *
      + *

      A field that specifies the date time or timestamp field as the criteria to use when + * importing incremental records from the source.

      */ - ConflictResolvingModel: ConflictResolvingModel | string | undefined; + DatetimeTypeFieldName?: string; +} +export namespace IncrementalPullConfig { /** - *

      The ObjectType name that is used to resolve profile merging conflicts when - * choosing SOURCE as the ConflictResolvingModel.

      + * @internal */ - SourceName?: string; + export const filterSensitiveLog = (obj: IncrementalPullConfig): any => ({ + ...obj, + }); } -export namespace ConflictResolution { +/** + *

      The properties that are applied when Marketo is being used as a source.

      + */ +export interface MarketoSourceProperties { + /** + *

      The object specified in the Marketo flow source.

      + */ + Object: string | undefined; +} + +export namespace MarketoSourceProperties { /** * @internal */ - export const filterSensitiveLog = (obj: ConflictResolution): any => ({ + export const filterSensitiveLog = (obj: MarketoSourceProperties): any => ({ ...obj, }); } /** - *

      The matching criteria to be used during the auto-merging process.

      + *

      The properties that are applied when Amazon S3 is being used as the flow source.

      */ -export interface Consolidation { +export interface S3SourceProperties { /** - *

      A list of matching criteria.

      + *

      The Amazon S3 bucket name where the source files are stored.

      */ - MatchingAttributesList: string[][] | undefined; + BucketName: string | undefined; + + /** + *

      The object key for the Amazon S3 bucket in which the source files are stored.

      + */ + BucketPrefix?: string; } -export namespace Consolidation { +export namespace S3SourceProperties { /** * @internal */ - export const filterSensitiveLog = (obj: Consolidation): any => ({ + export const filterSensitiveLog = (obj: S3SourceProperties): any => ({ ...obj, }); } /** - *

      Configuration settings for how to perform the auto-merging of profiles.

      + *

      The properties that are applied when Salesforce is being used as a source.

      */ -export interface AutoMerging { +export interface SalesforceSourceProperties { /** - *

      The flag that enables the auto-merging of duplicate profiles.

      + *

      The object specified in the Salesforce flow source.

      */ - Enabled: boolean | undefined; + Object: string | undefined; /** - *

      A list of matching attributes that represent matching criteria. If two profiles meet at - * least one of the requirements in the matching attributes list, they will be merged.

      + *

      The flag that enables dynamic fetching of new (recently added) fields in the Salesforce + * objects while running a flow.

      */ - Consolidation?: Consolidation; + EnableDynamicFieldUpdate?: boolean; /** - *

      How the auto-merging process should resolve conflicts between different profiles. For - * example, if Profile A and Profile B have the same FirstName and - * LastName (and that is the matching criteria), which - * EmailAddress should be used?

      + *

      Indicates whether Amazon AppFlow includes deleted files in the flow run.

      */ - ConflictResolution?: ConflictResolution; + IncludeDeletedRecords?: boolean; } -export namespace AutoMerging { +export namespace SalesforceSourceProperties { /** * @internal */ - export const filterSensitiveLog = (obj: AutoMerging): any => ({ + export const filterSensitiveLog = (obj: SalesforceSourceProperties): any => ({ + ...obj, + }); +} + +/** + *

      The properties that are applied when ServiceNow is being used as a source.

      + */ +export interface ServiceNowSourceProperties { + /** + *

      The object specified in the ServiceNow flow source.

      + */ + Object: string | undefined; +} + +export namespace ServiceNowSourceProperties { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ServiceNowSourceProperties): any => ({ + ...obj, + }); +} + +/** + *

      The properties that are applied when using Zendesk as a flow source.

      + */ +export interface ZendeskSourceProperties { + /** + *

      The object specified in the Zendesk flow source.

      + */ + Object: string | undefined; +} + +export namespace ZendeskSourceProperties { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ZendeskSourceProperties): any => ({ + ...obj, + }); +} + +/** + *

      Specifies the information that is required to query a particular Amazon AppFlow connector. + * Customer Profiles supports Salesforce, Zendesk, Marketo, ServiceNow and Amazon S3.

      + */ +export interface SourceConnectorProperties { + /** + *

      The properties that are applied when Marketo is being used as a source.

      + */ + Marketo?: MarketoSourceProperties; + + /** + *

      The properties that are applied when Amazon S3 is being used as the flow source.

      + */ + S3?: S3SourceProperties; + + /** + *

      The properties that are applied when Salesforce is being used as a source.

      + */ + Salesforce?: SalesforceSourceProperties; + + /** + *

      The properties that are applied when ServiceNow is being used as a source.

      + */ + ServiceNow?: ServiceNowSourceProperties; + + /** + *

      The properties that are applied when using Zendesk as a flow source.

      + */ + Zendesk?: ZendeskSourceProperties; +} + +export namespace SourceConnectorProperties { + /** + * @internal + */ + export const filterSensitiveLog = (obj: SourceConnectorProperties): any => ({ + ...obj, + }); +} + +/** + *

      Contains information about the configuration of the source connector used in the + * flow.

      + */ +export interface SourceFlowConfig { + /** + *

      The name of the AppFlow connector profile. This name must be unique for each connector + * profile in the AWS account.

      + */ + ConnectorProfileName?: string; + + /** + *

      The type of connector, such as Salesforce, Marketo, and so on.

      + */ + ConnectorType: SourceConnectorType | string | undefined; + + /** + *

      Defines the configuration for a scheduled incremental data pull. If a valid + * configuration is provided, the fields specified in the configuration are used when querying + * for the incremental data pull.

      + */ + IncrementalPullConfig?: IncrementalPullConfig; + + /** + *

      Specifies the information that is required to query a particular source + * connector.

      + */ + SourceConnectorProperties: SourceConnectorProperties | undefined; +} + +export namespace SourceFlowConfig { + /** + * @internal + */ + export const filterSensitiveLog = (obj: SourceFlowConfig): any => ({ ...obj, }); } @@ -466,17 +614,490 @@ export namespace ConnectorOperator { }); } -/** - *

      Configuration information about the S3 bucket where Identity Resolution Jobs write result files.

      - */ -export interface S3ExportingConfig { - /** - *

      The name of the S3 bucket where Identity Resolution Jobs write result files.

      - */ - S3BucketName: string | undefined; +export enum OperatorPropertiesKeys { + CONCAT_FORMAT = "CONCAT_FORMAT", + DATA_TYPE = "DATA_TYPE", + DESTINATION_DATA_TYPE = "DESTINATION_DATA_TYPE", + LOWER_BOUND = "LOWER_BOUND", + MASK_LENGTH = "MASK_LENGTH", + MASK_VALUE = "MASK_VALUE", + MATH_OPERATION_FIELDS_ORDER = "MATH_OPERATION_FIELDS_ORDER", + SOURCE_DATA_TYPE = "SOURCE_DATA_TYPE", + SUBFIELD_CATEGORY_MAP = "SUBFIELD_CATEGORY_MAP", + TRUNCATE_LENGTH = "TRUNCATE_LENGTH", + UPPER_BOUND = "UPPER_BOUND", + VALIDATION_ACTION = "VALIDATION_ACTION", + VALUE = "VALUE", + VALUES = "VALUES", +} - /** - *

      The S3 key name of the location where Identity Resolution Jobs write result files.

      +export enum TaskType { + ARITHMETIC = "Arithmetic", + FILTER = "Filter", + MAP = "Map", + MASK = "Mask", + MERGE = "Merge", + TRUNCATE = "Truncate", + VALIDATE = "Validate", +} + +/** + *

      A class for modeling different type of tasks. Task implementation varies based on the + * TaskType.

      + */ +export interface Task { + /** + *

      The operation to be performed on the provided source fields.

      + */ + ConnectorOperator?: ConnectorOperator; + + /** + *

      A field in a destination connector, or a field value against which Amazon AppFlow validates a + * source field.

      + */ + DestinationField?: string; + + /** + *

      The source fields to which a particular task is applied.

      + */ + SourceFields: string[] | undefined; + + /** + *

      A map used to store task-related information. The service looks for particular + * information based on the TaskType.

      + */ + TaskProperties?: { [key: string]: string }; + + /** + *

      Specifies the particular task implementation that Amazon AppFlow performs.

      + */ + TaskType: TaskType | string | undefined; +} + +export namespace Task { + /** + * @internal + */ + export const filterSensitiveLog = (obj: Task): any => ({ + ...obj, + }); +} + +export enum DataPullMode { + COMPLETE = "Complete", + INCREMENTAL = "Incremental", +} + +/** + *

      Specifies the configuration details of a scheduled-trigger flow that you define. + * Currently, these settings only apply to the scheduled-trigger type.

      + */ +export interface ScheduledTriggerProperties { + /** + *

      The scheduling expression that determines the rate at which the schedule will run, for + * example rate (5 minutes).

      + */ + ScheduleExpression: string | undefined; + + /** + *

      Specifies whether a scheduled flow has an incremental data transfer or a complete data + * transfer for each flow run.

      + */ + DataPullMode?: DataPullMode | string; + + /** + *

      Specifies the scheduled start time for a scheduled-trigger flow.

      + */ + ScheduleStartTime?: Date; + + /** + *

      Specifies the scheduled end time for a scheduled-trigger flow.

      + */ + ScheduleEndTime?: Date; + + /** + *

      Specifies the time zone used when referring to the date and time of a + * scheduled-triggered flow, such as America/New_York.

      + */ + Timezone?: string; + + /** + *

      Specifies the optional offset that is added to the time interval for a + * schedule-triggered flow.

      + */ + ScheduleOffset?: number; + + /** + *

      Specifies the date range for the records to import from the connector in the first flow + * run.

      + */ + FirstExecutionFrom?: Date; +} + +export namespace ScheduledTriggerProperties { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ScheduledTriggerProperties): any => ({ + ...obj, + }); +} + +/** + *

      Specifies the configuration details that control the trigger for a flow. Currently, + * these settings only apply to the Scheduled trigger type.

      + */ +export interface TriggerProperties { + /** + *

      Specifies the configuration details of a schedule-triggered flow that you define.

      + */ + Scheduled?: ScheduledTriggerProperties; +} + +export namespace TriggerProperties { + /** + * @internal + */ + export const filterSensitiveLog = (obj: TriggerProperties): any => ({ + ...obj, + }); +} + +export enum TriggerType { + EVENT = "Event", + ONDEMAND = "OnDemand", + SCHEDULED = "Scheduled", +} + +/** + *

      The trigger settings that determine how and when Amazon AppFlow runs the specified + * flow.

      + */ +export interface TriggerConfig { + /** + *

      Specifies the type of flow trigger. It can be OnDemand, Scheduled, or Event.

      + */ + TriggerType: TriggerType | string | undefined; + + /** + *

      Specifies the configuration details of a schedule-triggered flow that you define. + * Currently, these settings only apply to the Scheduled trigger type.

      + */ + TriggerProperties?: TriggerProperties; +} + +export namespace TriggerConfig { + /** + * @internal + */ + export const filterSensitiveLog = (obj: TriggerConfig): any => ({ + ...obj, + }); +} + +/** + *
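The trigger shapes above compose like this: a small sketch of a scheduled, incremental pull built only from the fields defined in this diff (the exact `ScheduleExpression` syntax is taken from the docstring's `rate (5 minutes)` example and may need adjusting for the service).

```ts
import { DataPullMode, TriggerConfig, TriggerType } from "@aws-sdk/client-customer-profiles";

// Scheduled trigger that transfers only incremental records on each run.
const triggerConfig: TriggerConfig = {
  TriggerType: TriggerType.SCHEDULED,
  TriggerProperties: {
    Scheduled: {
      ScheduleExpression: "rate(5minutes)", // format per the ScheduleExpression docstring above
      DataPullMode: DataPullMode.INCREMENTAL,
      ScheduleStartTime: new Date(Date.now() + 5 * 60 * 1000),
      Timezone: "America/New_York",
    },
  },
};
```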

      The configurations that control how Customer Profiles retrieves data from the source, + * Amazon AppFlow. Customer Profiles uses this information to create an AppFlow flow on behalf of + * customers.

      + */ +export interface FlowDefinition { + /** + *

      A description of the flow you want to create.

      + */ + Description?: string; + + /** + *

      The specified name of the flow. Use underscores (_) or hyphens (-) only. Spaces are not + * allowed.

      + */ + FlowName: string | undefined; + + /** + *

      The Amazon Resource Name of the AWS Key Management Service (KMS) key you provide for encryption.

      + */ + KmsArn: string | undefined; + + /** + *

      The configuration that controls how Customer Profiles retrieves data from the + * source.

      + */ + SourceFlowConfig: SourceFlowConfig | undefined; + + /** + *

      A list of tasks that Customer Profiles performs while transferring the data in the flow + * run.

      + */ + Tasks: Task[] | undefined; + + /** + *

      The trigger settings that determine how and when the flow runs.

      + */ + TriggerConfig: TriggerConfig | undefined; +} + +export namespace FlowDefinition { + /** + * @internal + */ + export const filterSensitiveLog = (obj: FlowDefinition): any => ({ + ...obj, + }); +} + +/** + *

      Details for workflow of type APPFLOW_INTEGRATION.

      + */ +export interface AppflowIntegration { + /** + *

      The configurations that control how Customer Profiles retrieves data from the source, + * Amazon AppFlow. Customer Profiles uses this information to create an AppFlow flow on behalf of + * customers.

      + */ + FlowDefinition: FlowDefinition | undefined; + + /** + *

      Batches in workflow of type APPFLOW_INTEGRATION.

      + */ + Batches?: Batch[]; +} + +export namespace AppflowIntegration { + /** + * @internal + */ + export const filterSensitiveLog = (obj: AppflowIntegration): any => ({ + ...obj, + }); +} + +/** + *
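Putting the pieces together, the `AppflowIntegration` shape nests a `FlowDefinition` (source config, tasks, trigger) plus optional ingestion `Batches`. A sketch assembled strictly from the structures defined above; names, ARNs, and field values are illustrative only.

```ts
import {
  AppflowIntegration,
  SourceConnectorType,
  TaskType,
  TriggerType,
} from "@aws-sdk/client-customer-profiles";

const appflowIntegration: AppflowIntegration = {
  FlowDefinition: {
    FlowName: "my-salesforce-flow",
    Description: "Ingest Salesforce Account records into Customer Profiles",
    KmsArn: "arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID",
    SourceFlowConfig: {
      ConnectorType: SourceConnectorType.SALESFORCE,
      ConnectorProfileName: "my-salesforce-connection",
      SourceConnectorProperties: {
        Salesforce: { Object: "Account", EnableDynamicFieldUpdate: false, IncludeDeletedRecords: false },
      },
    },
    Tasks: [
      // A single pass-through mapping task; real flows typically map many fields.
      { TaskType: TaskType.MAP, SourceFields: ["Id"], DestinationField: "Id" },
    ],
    TriggerConfig: { TriggerType: TriggerType.ONDEMAND },
  },
  Batches: [
    { StartTime: new Date("2022-01-01T00:00:00Z"), EndTime: new Date("2022-02-01T00:00:00Z") },
  ],
};
```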

      Structure holding all APPFLOW_INTEGRATION specific workflow attributes.

      + */ +export interface AppflowIntegrationWorkflowAttributes { + /** + *

Specifies the source connector type, such as Salesforce, ServiceNow, and Marketo, which indicates the source of ingestion.

      + */ + SourceConnectorType: SourceConnectorType | string | undefined; + + /** + *

      The name of the AppFlow connector profile used for ingestion.

      + */ + ConnectorProfileName: string | undefined; + + /** + *

      The Amazon Resource Name (ARN) of the IAM role. Customer Profiles assumes this role to create resources on your behalf as part of workflow execution.

      + */ + RoleArn?: string; +} + +export namespace AppflowIntegrationWorkflowAttributes { + /** + * @internal + */ + export const filterSensitiveLog = (obj: AppflowIntegrationWorkflowAttributes): any => ({ + ...obj, + }); +} + +/** + *

      Workflow specific execution metrics for APPFLOW_INTEGRATION workflow.

      + */ +export interface AppflowIntegrationWorkflowMetrics { + /** + *

      Number of records processed in APPFLOW_INTEGRATION workflow.

      + */ + RecordsProcessed: number | undefined; + + /** + *

      Total steps completed in APPFLOW_INTEGRATION workflow.

      + */ + StepsCompleted: number | undefined; + + /** + *

      Total steps in APPFLOW_INTEGRATION workflow.

      + */ + TotalSteps: number | undefined; +} + +export namespace AppflowIntegrationWorkflowMetrics { + /** + * @internal + */ + export const filterSensitiveLog = (obj: AppflowIntegrationWorkflowMetrics): any => ({ + ...obj, + }); +} + +export enum Status { + CANCELLED = "CANCELLED", + COMPLETE = "COMPLETE", + FAILED = "FAILED", + IN_PROGRESS = "IN_PROGRESS", + NOT_STARTED = "NOT_STARTED", + RETRY = "RETRY", + SPLIT = "SPLIT", +} + +/** + *

      Workflow step details for APPFLOW_INTEGRATION workflow.

      + */ +export interface AppflowIntegrationWorkflowStep { + /** + *

Name of the flow created during execution of the workflow step. The APPFLOW_INTEGRATION workflow type creates an Amazon AppFlow flow during workflow step execution on the customer's behalf.

      + */ + FlowName: string | undefined; + + /** + *

      Workflow step status for APPFLOW_INTEGRATION workflow.

      + */ + Status: Status | string | undefined; + + /** + *

      Message indicating execution of workflow step for APPFLOW_INTEGRATION workflow.

      + */ + ExecutionMessage: string | undefined; + + /** + *

      Total number of records processed during execution of workflow step for APPFLOW_INTEGRATION workflow.

      + */ + RecordsProcessed: number | undefined; + + /** + *

      Start datetime of records pulled in batch during execution of workflow step for APPFLOW_INTEGRATION workflow.

      + */ + BatchRecordsStartTime: string | undefined; + + /** + *

      End datetime of records pulled in batch during execution of workflow step for APPFLOW_INTEGRATION workflow.

      + */ + BatchRecordsEndTime: string | undefined; + + /** + *

      Creation timestamp of workflow step for APPFLOW_INTEGRATION workflow.

      + */ + CreatedAt: Date | undefined; + + /** + *

      Last updated timestamp for workflow step for APPFLOW_INTEGRATION workflow.

      + */ + LastUpdatedAt: Date | undefined; +} + +export namespace AppflowIntegrationWorkflowStep { + /** + * @internal + */ + export const filterSensitiveLog = (obj: AppflowIntegrationWorkflowStep): any => ({ + ...obj, + }); +} + +export enum ConflictResolvingModel { + RECENCY = "RECENCY", + SOURCE = "SOURCE", +} + +/** + *

      How the auto-merging process should resolve conflicts between different profiles.

      + */ +export interface ConflictResolution { + /** + *

      How the auto-merging process should resolve conflicts between different profiles.

      + *
        + *
      • + *

        + * RECENCY: Uses the data that was most recently updated.

        + *
      • + *
      • + *

+ * SOURCE: Uses the data from a specific source. For example, if a + * company has been acquired or two departments have merged, data from the specified + * source is used. If two duplicate profiles are from the same source, then + * RECENCY is used again.

        + *
      • + *
      + */ + ConflictResolvingModel: ConflictResolvingModel | string | undefined; + + /** + *

      The ObjectType name that is used to resolve profile merging conflicts when + * choosing SOURCE as the ConflictResolvingModel.

      + */ + SourceName?: string; +} + +export namespace ConflictResolution { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ConflictResolution): any => ({ + ...obj, + }); +} + +/** + *

      The matching criteria to be used during the auto-merging process.

      + */ +export interface Consolidation { + /** + *

      A list of matching criteria.

      + */ + MatchingAttributesList: string[][] | undefined; +} + +export namespace Consolidation { + /** + * @internal + */ + export const filterSensitiveLog = (obj: Consolidation): any => ({ + ...obj, + }); +} + +/** + *

      Configuration settings for how to perform the auto-merging of profiles.

      + */ +export interface AutoMerging { + /** + *

      The flag that enables the auto-merging of duplicate profiles.

      + */ + Enabled: boolean | undefined; + + /** + *

      A list of matching attributes that represent matching criteria. If two profiles meet at + * least one of the requirements in the matching attributes list, they will be merged.

      + */ + Consolidation?: Consolidation; + + /** + *

      How the auto-merging process should resolve conflicts between different profiles. For + * example, if Profile A and Profile B have the same FirstName and + * LastName (and that is the matching criteria), which + * EmailAddress should be used?

      + */ + ConflictResolution?: ConflictResolution; +} + +export namespace AutoMerging { + /** + * @internal + */ + export const filterSensitiveLog = (obj: AutoMerging): any => ({ + ...obj, + }); +} + +/** + *
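
For orientation, a minimal sketch (not part of this diff) of how the AutoMerging, Consolidation, and ConflictResolution shapes above fit together; the attribute names and the Salesforce source name are illustrative placeholders.

import { AutoMerging, ConflictResolvingModel } from "@aws-sdk/client-customer-profiles";

// Hypothetical auto-merging configuration: profiles that share an EmailAddress, or both
// FirstName and LastName, are treated as duplicate candidates.
export const autoMerging: AutoMerging = {
  Enabled: true,
  Consolidation: {
    MatchingAttributesList: [["EmailAddress"], ["FirstName", "LastName"]],
  },
  // When merged profiles disagree on a field, prefer the named source; duplicates from the
  // same source fall back to RECENCY.
  ConflictResolution: {
    ConflictResolvingModel: ConflictResolvingModel.SOURCE,
    SourceName: "Salesforce",
  },
};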

      Configuration information about the S3 bucket where Identity Resolution Jobs write result files.

      + */ +export interface S3ExportingConfig { + /** + *

      The name of the S3 bucket where Identity Resolution Jobs write result files.

      + */ + S3BucketName: string | undefined; + + /** + *

      The S3 key name of the location where Identity Resolution Jobs write result files.

      */ S3KeyName?: string; } @@ -650,82 +1271,167 @@ export interface MatchingResponse { JobSchedule?: JobSchedule; /** - *

      Configuration information about the auto-merging process.

      + *

      Configuration information about the auto-merging process.

      + */ + AutoMerging?: AutoMerging; + + /** + *

      Configuration information for exporting Identity Resolution results, for example, to an S3 + * bucket.

      + */ + ExportingConfig?: ExportingConfig; +} + +export namespace MatchingResponse { + /** + * @internal + */ + export const filterSensitiveLog = (obj: MatchingResponse): any => ({ + ...obj, + }); +} + +export interface CreateDomainResponse { + /** + *

      The unique name of the domain.

      + */ + DomainName: string | undefined; + + /** + *

      The default number of days until the data within the domain expires.

      + */ + DefaultExpirationDays: number | undefined; + + /** + *

      The default encryption key, which is an AWS managed key, is used when no specific type + * of encryption key is specified. It is used to encrypt all data before it is placed in + * permanent or semi-permanent storage.

      + */ + DefaultEncryptionKey?: string; + + /** + *

      The URL of the SQS dead letter queue, which is used for reporting errors associated with + * ingesting data from third party applications.

      + */ + DeadLetterQueueUrl?: string; + + /** + *

      The process of matching duplicate profiles. If Matching = true, Amazon Connect Customer Profiles starts a weekly batch process called the Identity Resolution Job. If you do not specify a date and time for the Identity Resolution Job to run, by default it runs every Saturday at 12AM UTC to detect duplicate profiles in your domains.

      After the Identity Resolution Job completes, use the GetMatches API to return and review the results. Or, if you have configured ExportingConfig in the MatchingRequest, you can download the results from S3.

      + */ + Matching?: MatchingResponse; + + /** + *

      The timestamp of when the domain was created.

      + */ + CreatedAt: Date | undefined; + + /** + *

      The timestamp of when the domain was most recently edited.

      + */ + LastUpdatedAt: Date | undefined; + + /** + *

      The tags used to organize, track, or control access for this resource.

      + */ + Tags?: { [key: string]: string }; +} + +export namespace CreateDomainResponse { + /** + * @internal */ - AutoMerging?: AutoMerging; + export const filterSensitiveLog = (obj: CreateDomainResponse): any => ({ + ...obj, + }); +} +/** + *
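
A hedged follow-up sketch (not part of this diff): once Matching is enabled on a domain and the weekly Identity Resolution Job has run, the results can be read back with the GetMatches API mentioned above, assuming this package exposes it as GetMatchesCommand. The region and domain name are placeholders.

import { CustomerProfilesClient, GetMatchesCommand } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function previewMatches(): Promise<void> {
  const { Matches, NextToken } = await client.send(
    new GetMatchesCommand({ DomainName: "my-domain", MaxResults: 50 })
  );
  for (const match of Matches ?? []) {
    // Each match groups the profile IDs that the Identity Resolution Job considers duplicates.
    console.log(match.MatchId, match.ProfileIds);
  }
  // NextToken, when present, can be passed to a follow-up GetMatchesCommand call to page further.
  console.log("more results:", Boolean(NextToken));
}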

      Configuration data for integration workflow.

      + */ +export interface IntegrationConfig { /** - *

      Configuration information for exporting Identity Resolution results, for example, to an S3 - * bucket.

      + *

      Configuration data for APPFLOW_INTEGRATION workflow type.

      */ - ExportingConfig?: ExportingConfig; + AppflowIntegration?: AppflowIntegration; } -export namespace MatchingResponse { +export namespace IntegrationConfig { /** * @internal */ - export const filterSensitiveLog = (obj: MatchingResponse): any => ({ + export const filterSensitiveLog = (obj: IntegrationConfig): any => ({ ...obj, }); } -export interface CreateDomainResponse { +export enum WorkflowType { + APPFLOW_INTEGRATION = "APPFLOW_INTEGRATION", +} + +export interface CreateIntegrationWorkflowRequest { /** *

      The unique name of the domain.

      */ DomainName: string | undefined; /** - *

      The default number of days until the data within the domain expires.

      + *

      The type of workflow. The only supported value is APPFLOW_INTEGRATION.

      */ - DefaultExpirationDays: number | undefined; + WorkflowType: WorkflowType | string | undefined; /** - *

      The default encryption key, which is an AWS managed key, is used when no specific type - * of encryption key is specified. It is used to encrypt all data before it is placed in - * permanent or semi-permanent storage.

      + *

      Configuration data for integration workflow.

      */ - DefaultEncryptionKey?: string; + IntegrationConfig: IntegrationConfig | undefined; /** - *

      The URL of the SQS dead letter queue, which is used for reporting errors associated with - * ingesting data from third party applications.

      + *

      The name of the profile object type.

      */ - DeadLetterQueueUrl?: string; + ObjectTypeName: string | undefined; /** - *

      The process of matching duplicate profiles. If Matching = true, Amazon Connect Customer Profiles starts a weekly - * batch process called Identity Resolution Job. If you do not specify a date and time for Identity Resolution Job to run, by default it runs every - * Saturday at 12AM UTC to detect duplicate profiles in your domains.

      - *

      After the Identity Resolution Job completes, use the - * GetMatches - * API to return and review the results. Or, if you have configured ExportingConfig in the MatchingRequest, you can download the results from - * S3.

      + *

      The Amazon Resource Name (ARN) of the IAM role. Customer Profiles assumes this role to create resources on your behalf as part of workflow execution.

      */ - Matching?: MatchingResponse; + RoleArn: string | undefined; /** - *

      The timestamp of when the domain was created.

      + *

      The tags used to organize, track, or control access for this resource.

      */ - CreatedAt: Date | undefined; + Tags?: { [key: string]: string }; +} +export namespace CreateIntegrationWorkflowRequest { /** - *

      The timestamp of when the domain was most recently edited.

      + * @internal */ - LastUpdatedAt: Date | undefined; + export const filterSensitiveLog = (obj: CreateIntegrationWorkflowRequest): any => ({ + ...obj, + }); +} +export interface CreateIntegrationWorkflowResponse { /** - *

      The tags used to organize, track, or control access for this resource.

      + *

      Unique identifier for the workflow.

      */ - Tags?: { [key: string]: string }; + WorkflowId: string | undefined; + + /** + *

      A message indicating that the create request was received.

      + */ + Message: string | undefined; } -export namespace CreateDomainResponse { +export namespace CreateIntegrationWorkflowResponse { /** * @internal */ - export const filterSensitiveLog = (obj: CreateDomainResponse): any => ({ + export const filterSensitiveLog = (obj: CreateIntegrationWorkflowResponse): any => ({ ...obj, }); } @@ -1119,6 +1825,38 @@ export namespace DeleteProfileObjectTypeResponse { }); } +export interface DeleteWorkflowRequest { + /** + *
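
A hedged usage sketch for the CreateIntegrationWorkflowRequest and CreateIntegrationWorkflowResponse shapes above; it is not part of this diff. The domain name, object type name, role ARN, tags, and region are placeholders, and the AppflowIntegration value (flow definition and batches) is assumed to be built elsewhere.

import {
  AppflowIntegration,
  CreateIntegrationWorkflowCommand,
  CustomerProfilesClient,
  WorkflowType,
} from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function startAppflowBackfill(appflowIntegration: AppflowIntegration): Promise<string | undefined> {
  const { WorkflowId, Message } = await client.send(
    new CreateIntegrationWorkflowCommand({
      DomainName: "my-domain",
      WorkflowType: WorkflowType.APPFLOW_INTEGRATION,
      IntegrationConfig: { AppflowIntegration: appflowIntegration },
      ObjectTypeName: "CustomerProfile",
      RoleArn: "arn:aws:iam::123456789012:role/CustomerProfilesWorkflowRole",
      Tags: { team: "crm" },
    })
  );
  // The response only acknowledges the request; the workflow itself runs asynchronously.
  console.log(Message);
  return WorkflowId;
}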

      The unique name of the domain.

      + */ + DomainName: string | undefined; + + /** + *

      Unique identifier for the workflow.

      + */ + WorkflowId: string | undefined; +} + +export namespace DeleteWorkflowRequest { + /** + * @internal + */ + export const filterSensitiveLog = (obj: DeleteWorkflowRequest): any => ({ + ...obj, + }); +} + +export interface DeleteWorkflowResponse {} + +export namespace DeleteWorkflowResponse { + /** + * @internal + */ + export const filterSensitiveLog = (obj: DeleteWorkflowResponse): any => ({ + ...obj, + }); +} + export interface GetAutoMergingPreviewRequest { /** *

      The unique name of the domain.

      @@ -1556,6 +2294,11 @@ export interface GetIntegrationResponse { * ShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

      */ ObjectTypeNames?: { [key: string]: string }; + + /** + *

      Unique identifier for the workflow.

      + */ + WorkflowId?: string; } export namespace GetIntegrationResponse { @@ -1853,445 +2596,255 @@ export namespace GetProfileObjectTypeTemplateRequest { export interface GetProfileObjectTypeTemplateResponse { /** - *

      A unique identifier for the object template.

      - */ - TemplateId?: string; - - /** - *

      The name of the source of the object template.

      - */ - SourceName?: string; - - /** - *

      The source of the object template.

      - */ - SourceObject?: string; - - /** - *

      Indicates whether a profile should be created when data is received if one doesn’t exist - * for an object of this type. The default is FALSE. If the AllowProfileCreation - * flag is set to FALSE, then the service tries to fetch a standard profile and - * associate this object with the profile. If it is set to TRUE, and if no match - * is found, then the service creates a new standard profile.

      - */ - AllowProfileCreation?: boolean; - - /** - *

      The format of your sourceLastUpdatedTimestamp that was previously set - * up.

      - */ - SourceLastUpdatedTimestampFormat?: string; - - /** - *

      A map of the name and ObjectType field.

      - */ - Fields?: { [key: string]: ObjectTypeField }; - - /** - *

      A list of unique keys that can be used to map data to the profile.

      - */ - Keys?: { [key: string]: ObjectTypeKey[] }; -} - -export namespace GetProfileObjectTypeTemplateResponse { - /** - * @internal - */ - export const filterSensitiveLog = (obj: GetProfileObjectTypeTemplateResponse): any => ({ - ...obj, - }); -} - -export interface ListAccountIntegrationsRequest { - /** - *

      The URI of the S3 bucket or any other type of data source.

      - */ - Uri: string | undefined; - - /** - *

      The pagination token from the previous ListAccountIntegrations API call.

      - */ - NextToken?: string; - - /** - *

      The maximum number of objects returned per page.

      - */ - MaxResults?: number; -} - -export namespace ListAccountIntegrationsRequest { - /** - * @internal - */ - export const filterSensitiveLog = (obj: ListAccountIntegrationsRequest): any => ({ - ...obj, - }); -} - -/** - *

      An integration in list of integrations.

      - */ -export interface ListIntegrationItem { - /** - *

      The unique name of the domain.

      - */ - DomainName: string | undefined; - - /** - *

      The URI of the S3 bucket or any other type of data source.

      - */ - Uri: string | undefined; - - /** - *

      The name of the profile object type.

      - */ - ObjectTypeName?: string; - - /** - *

      The timestamp of when the domain was created.

      - */ - CreatedAt: Date | undefined; - - /** - *

      The timestamp of when the domain was most recently edited.

      - */ - LastUpdatedAt: Date | undefined; - - /** - *

      The tags used to organize, track, or control access for this resource.

      - */ - Tags?: { [key: string]: string }; - - /** - *

      A map in which each key is an event type from an external application such as Segment or Shopify, and each value is an ObjectTypeName (template) used to ingest the event. - * It supports the following event types: SegmentIdentify, ShopifyCreateCustomers, ShopifyUpdateCustomers, ShopifyCreateDraftOrders, - * ShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

      - */ - ObjectTypeNames?: { [key: string]: string }; -} - -export namespace ListIntegrationItem { - /** - * @internal - */ - export const filterSensitiveLog = (obj: ListIntegrationItem): any => ({ - ...obj, - }); -} - -export interface ListAccountIntegrationsResponse { - /** - *

      The list of ListAccountIntegration instances.

      - */ - Items?: ListIntegrationItem[]; - - /** - *

      The pagination token from the previous ListAccountIntegrations API call.

      - */ - NextToken?: string; -} - -export namespace ListAccountIntegrationsResponse { - /** - * @internal - */ - export const filterSensitiveLog = (obj: ListAccountIntegrationsResponse): any => ({ - ...obj, - }); -} - -export interface ListDomainsRequest { - /** - *

      The pagination token from the previous ListDomain API call.

      - */ - NextToken?: string; - - /** - *

      The maximum number of objects returned per page.

      - */ - MaxResults?: number; -} - -export namespace ListDomainsRequest { - /** - * @internal - */ - export const filterSensitiveLog = (obj: ListDomainsRequest): any => ({ - ...obj, - }); -} - -/** - *

      An object in a list that represents a domain.

      - */ -export interface ListDomainItem { - /** - *

      The unique name of the domain.

      + *

      A unique identifier for the object template.

      */ - DomainName: string | undefined; + TemplateId?: string; /** - *

      The timestamp of when the domain was created.

      + *

      The name of the source of the object template.

      */ - CreatedAt: Date | undefined; + SourceName?: string; /** - *

      The timestamp of when the domain was most recently edited.

      + *

      The source of the object template.

      */ - LastUpdatedAt: Date | undefined; + SourceObject?: string; /** - *

      The tags used to organize, track, or control access for this resource.

      + *

      Indicates whether a profile should be created when data is received if one doesn’t exist + * for an object of this type. The default is FALSE. If the AllowProfileCreation + * flag is set to FALSE, then the service tries to fetch a standard profile and + * associate this object with the profile. If it is set to TRUE, and if no match + * is found, then the service creates a new standard profile.

      */ - Tags?: { [key: string]: string }; -} + AllowProfileCreation?: boolean; -export namespace ListDomainItem { /** - * @internal + *

      The format of your sourceLastUpdatedTimestamp that was previously set + * up.

      */ - export const filterSensitiveLog = (obj: ListDomainItem): any => ({ - ...obj, - }); -} + SourceLastUpdatedTimestampFormat?: string; -export interface ListDomainsResponse { /** - *

      The list of ListDomains instances.

      + *

      A map of the name and ObjectType field.

      */ - Items?: ListDomainItem[]; + Fields?: { [key: string]: ObjectTypeField }; /** - *

      The pagination token from the previous ListDomains API call.

      + *

      A list of unique keys that can be used to map data to the profile.

      */ - NextToken?: string; + Keys?: { [key: string]: ObjectTypeKey[] }; } -export namespace ListDomainsResponse { +export namespace GetProfileObjectTypeTemplateResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ListDomainsResponse): any => ({ + export const filterSensitiveLog = (obj: GetProfileObjectTypeTemplateResponse): any => ({ ...obj, }); } -export interface ListIdentityResolutionJobsRequest { +export interface GetWorkflowRequest { /** *

      The unique name of the domain.

      */ DomainName: string | undefined; /** - *

      The token for the next set of results. Use the value returned in the previous - * response in the next request to retrieve the next set of results.

      - */ - NextToken?: string; - - /** - *

      The maximum number of results to return per page.

      + *

      Unique identifier for the workflow.

      */ - MaxResults?: number; + WorkflowId: string | undefined; } -export namespace ListIdentityResolutionJobsRequest { +export namespace GetWorkflowRequest { /** * @internal */ - export const filterSensitiveLog = (obj: ListIdentityResolutionJobsRequest): any => ({ + export const filterSensitiveLog = (obj: GetWorkflowRequest): any => ({ ...obj, }); } /** - *

      Information about the Identity Resolution Job.

      + *

      Structure to hold workflow attributes.

      */ -export interface IdentityResolutionJob { +export interface WorkflowAttributes { /** - *

      The unique name of the domain.

      + *

      Workflow attributes specific to APPFLOW_INTEGRATION workflow.

      */ - DomainName?: string; + AppflowIntegration?: AppflowIntegrationWorkflowAttributes; +} +export namespace WorkflowAttributes { /** - *

      The unique identifier of the Identity Resolution Job.

      + * @internal */ - JobId?: string; + export const filterSensitiveLog = (obj: WorkflowAttributes): any => ({ + ...obj, + }); +} +/** + *

      Generic object containing workflow execution metrics.

      + */ +export interface WorkflowMetrics { /** - *

      The status of the Identity Resolution Job.

      • PENDING: The Identity Resolution Job is scheduled but has not started yet. If you turn off the Identity Resolution feature in your domain, jobs in the PENDING state are deleted.

      • PREPROCESSING: The Identity Resolution Job is loading your data.

      • FIND_MATCHING: The Identity Resolution Job is using the machine learning model to identify profiles that belong to the same matching group.

      • MERGING: The Identity Resolution Job is merging duplicate profiles.

      • COMPLETED: The Identity Resolution Job completed successfully.

      • PARTIAL_SUCCESS: There's a system error and not all of the data is merged. The Identity Resolution Job writes a message indicating the source of the problem.

      • FAILED: The Identity Resolution Job did not merge any data. It writes a message indicating the source of the problem.
      + *

      Workflow execution metrics for APPFLOW_INTEGRATION workflow.

      */ - Status?: IdentityResolutionJobStatus | string; + AppflowIntegration?: AppflowIntegrationWorkflowMetrics; +} +export namespace WorkflowMetrics { /** - *

      The timestamp of when the job was started or will be started.

      + * @internal */ - JobStartTime?: Date; + export const filterSensitiveLog = (obj: WorkflowMetrics): any => ({ + ...obj, + }); +} +export interface GetWorkflowResponse { /** - *

      The timestamp of when the job was completed.

      + *

      Unique identifier for the workflow.

      */ - JobEndTime?: Date; + WorkflowId?: string; /** - *

      Statistics about an Identity Resolution Job.

      + *

      The type of workflow. The only supported value is APPFLOW_INTEGRATION.

      */ - JobStats?: JobStats; + WorkflowType?: WorkflowType | string; /** - *

      The S3 location where the Identity Resolution Job writes result files.

      + *

      Status of workflow execution.

      */ - ExportingLocation?: ExportingLocation; + Status?: Status | string; /** - *

      The error messages that are generated when the Identity Resolution Job runs.

      + *

      Workflow error messages during execution (if any).

      */ - Message?: string; -} + ErrorDescription?: string; -export namespace IdentityResolutionJob { /** - * @internal + *

      The timestamp that represents when workflow execution started.

      */ - export const filterSensitiveLog = (obj: IdentityResolutionJob): any => ({ - ...obj, - }); -} + StartDate?: Date; -export interface ListIdentityResolutionJobsResponse { /** - *

      A list of Identity Resolution Jobs.

      + *

      The timestamp that represents when the workflow execution was last updated.

      */ - IdentityResolutionJobsList?: IdentityResolutionJob[]; + LastUpdatedAt?: Date; /** - *

      If there are additional results, this is the token for the next set of results.

      + *

      Attributes provided for workflow execution.

      */ - NextToken?: string; + Attributes?: WorkflowAttributes; + + /** + *

      Workflow-specific execution metrics.

      + */ + Metrics?: WorkflowMetrics; } -export namespace ListIdentityResolutionJobsResponse { +export namespace GetWorkflowResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ListIdentityResolutionJobsResponse): any => ({ + export const filterSensitiveLog = (obj: GetWorkflowResponse): any => ({ ...obj, }); } -export interface ListIntegrationsRequest { +export interface GetWorkflowStepsRequest { /** *
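
A hedged sketch (not from this diff) of reading the GetWorkflowResponse fields defined above; the domain name and region are placeholders.

import { CustomerProfilesClient, GetWorkflowCommand, Status } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function describeWorkflow(workflowId: string) {
  const workflow = await client.send(
    new GetWorkflowCommand({ DomainName: "my-domain", WorkflowId: workflowId })
  );
  if (workflow.Status === Status.FAILED) {
    // ErrorDescription carries the workflow error message described above.
    throw new Error(workflow.ErrorDescription);
  }
  // For APPFLOW_INTEGRATION workflows, Metrics.AppflowIntegration reports
  // RecordsProcessed, StepsCompleted, and TotalSteps.
  return workflow.Metrics?.AppflowIntegration;
}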

      The unique name of the domain.

      */ DomainName: string | undefined; /** - *

      The pagination token from the previous ListIntegrations API call.

      + *

      Unique identifier for the workflow.

      + */ + WorkflowId: string | undefined; + + /** + *

      The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

      */ NextToken?: string; /** - *

      The maximum number of objects returned per page.

      + *

      The maximum number of results to return per page.

      */ MaxResults?: number; } -export namespace ListIntegrationsRequest { +export namespace GetWorkflowStepsRequest { /** * @internal */ - export const filterSensitiveLog = (obj: ListIntegrationsRequest): any => ({ + export const filterSensitiveLog = (obj: GetWorkflowStepsRequest): any => ({ ...obj, }); } -export interface ListIntegrationsResponse { - /** - *

      The list of ListIntegrations instances.

      - */ - Items?: ListIntegrationItem[]; - +/** + *

      List containing steps in workflow.

      + */ +export interface WorkflowStepItem { /** - *

      The pagination token from the previous ListIntegrations API call.

      + *

      Workflow step information specific to APPFLOW_INTEGRATION workflow.

      */ - NextToken?: string; + AppflowIntegration?: AppflowIntegrationWorkflowStep; } -export namespace ListIntegrationsResponse { +export namespace WorkflowStepItem { /** * @internal */ - export const filterSensitiveLog = (obj: ListIntegrationsResponse): any => ({ + export const filterSensitiveLog = (obj: WorkflowStepItem): any => ({ ...obj, }); } -/** - *

      The filter applied to ListProfileObjects response to include profile objects with the - * specified index values. This filter is only supported for ObjectTypeName _asset, _case and - * _order.

      - */ -export interface ObjectFilter { +export interface GetWorkflowStepsResponse { /** - *

      A searchable identifier of a standard profile object. The predefined keys you can use to - * search for _asset include: _assetId, _assetName, _serialNumber. The predefined keys you can - * use to search for _case include: _caseId. The predefined keys you can use to search for - * _order include: _orderId.

      + *

      Unique identifier for the workflow.

      */ - KeyName: string | undefined; + WorkflowId?: string; /** - *

      A list of key values.

      + *

      The type of workflow. The only supported value is APPFLOW_INTEGRATION.

      */ - Values: string[] | undefined; + WorkflowType?: WorkflowType | string; + + /** + *

      List containing workflow step details.

      + */ + Items?: WorkflowStepItem[]; + + /** + *

      If there are additional results, this is the token for the next set of results.

      + */ + NextToken?: string; } -export namespace ObjectFilter { +export namespace GetWorkflowStepsResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ObjectFilter): any => ({ + export const filterSensitiveLog = (obj: GetWorkflowStepsResponse): any => ({ ...obj, }); } -export interface ListProfileObjectsRequest { +export interface ListAccountIntegrationsRequest { /** - *
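
A hedged sketch (not part of this diff) that pages through GetWorkflowSteps using NextToken, as the request and response shapes above describe; the domain name and region are placeholders.

import {
  CustomerProfilesClient,
  GetWorkflowStepsCommand,
  WorkflowStepItem,
} from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function listAllWorkflowSteps(workflowId: string): Promise<WorkflowStepItem[]> {
  const steps: WorkflowStepItem[] = [];
  let nextToken: string | undefined;
  do {
    const page = await client.send(
      new GetWorkflowStepsCommand({
        DomainName: "my-domain",
        WorkflowId: workflowId,
        MaxResults: 50,
        NextToken: nextToken,
      })
    );
    steps.push(...(page.Items ?? []));
    nextToken = page.NextToken;
  } while (nextToken);
  return steps;
}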

      The pagination token from the previous call to ListProfileObjects.

      + *

      The URI of the S3 bucket or any other type of data source.

      + */ + Uri: string | undefined; + + /** + *

      The pagination token from the previous ListAccountIntegrations API call.

      */ NextToken?: string; @@ -2300,96 +2853,101 @@ export interface ListProfileObjectsRequest { */ MaxResults?: number; + /** + *

      Boolean to indicate if hidden integrations should be returned. Defaults to False.

      + */ + IncludeHidden?: boolean; +} + +export namespace ListAccountIntegrationsRequest { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ListAccountIntegrationsRequest): any => ({ + ...obj, + }); +} + +/** + *

      An integration in a list of integrations.

      + */ +export interface ListIntegrationItem { /** *

      The unique name of the domain.

      */ DomainName: string | undefined; /** - *

      The name of the profile object type.

      + *

      The URI of the S3 bucket or any other type of data source.

      */ - ObjectTypeName: string | undefined; + Uri: string | undefined; /** - *

      The unique identifier of a customer profile.

      + *

      The name of the profile object type.

      */ - ProfileId: string | undefined; + ObjectTypeName?: string; /** - *

      Applies a filter to the response to include profile objects with the specified index - * values. This filter is only supported for ObjectTypeName _asset, _case and _order.

      + *

      The timestamp of when the domain was created.

      */ - ObjectFilter?: ObjectFilter; -} + CreatedAt: Date | undefined; -export namespace ListProfileObjectsRequest { /** - * @internal + *

      The timestamp of when the domain was most recently edited.

      */ - export const filterSensitiveLog = (obj: ListProfileObjectsRequest): any => ({ - ...obj, - }); -} + LastUpdatedAt: Date | undefined; -/** - *

      A ProfileObject in a list of ProfileObjects.

      - */ -export interface ListProfileObjectsItem { /** - *

      Specifies the kind of object being added to a profile, such as - * "Salesforce-Account."

      + *

      The tags used to organize, track, or control access for this resource.

      */ - ObjectTypeName?: string; + Tags?: { [key: string]: string }; /** - *

      The unique identifier of the ProfileObject generated by the service.

      + *

      A map in which each key is an event type from an external application such as Segment or Shopify, and each value is an ObjectTypeName (template) used to ingest the event. + * It supports the following event types: SegmentIdentify, ShopifyCreateCustomers, ShopifyUpdateCustomers, ShopifyCreateDraftOrders, + * ShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

      */ - ProfileObjectUniqueKey?: string; + ObjectTypeNames?: { [key: string]: string }; /** - *

      A JSON representation of a ProfileObject that belongs to a profile.

      + *

      Unique identifier for the workflow.

      */ - Object?: string; + WorkflowId?: string; } -export namespace ListProfileObjectsItem { +export namespace ListIntegrationItem { /** * @internal */ - export const filterSensitiveLog = (obj: ListProfileObjectsItem): any => ({ + export const filterSensitiveLog = (obj: ListIntegrationItem): any => ({ ...obj, }); } -export interface ListProfileObjectsResponse { +export interface ListAccountIntegrationsResponse { /** - *

      The list of ListProfileObject instances.

      + *

      The list of ListAccountIntegration instances.

      */ - Items?: ListProfileObjectsItem[]; + Items?: ListIntegrationItem[]; /** - *

      The pagination token from the previous call to ListProfileObjects.

      + *

      The pagination token from the previous ListAccountIntegrations API call.

      */ NextToken?: string; } -export namespace ListProfileObjectsResponse { +export namespace ListAccountIntegrationsResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ListProfileObjectsResponse): any => ({ + export const filterSensitiveLog = (obj: ListAccountIntegrationsResponse): any => ({ ...obj, }); } -export interface ListProfileObjectTypesRequest { - /** - *

      The unique name of the domain.

      - */ - DomainName: string | undefined; - +export interface ListDomainsRequest { /** - *

      Identifies the next page of results to return.

      + *

      The pagination token from the previous ListDomain API call.

      */ NextToken?: string; @@ -2399,38 +2957,33 @@ export interface ListProfileObjectTypesRequest { MaxResults?: number; } -export namespace ListProfileObjectTypesRequest { +export namespace ListDomainsRequest { /** * @internal */ - export const filterSensitiveLog = (obj: ListProfileObjectTypesRequest): any => ({ + export const filterSensitiveLog = (obj: ListDomainsRequest): any => ({ ...obj, }); } /** - *

      A ProfileObjectType instance.

      + *

      An object in a list that represents a domain.

      */ -export interface ListProfileObjectTypeItem { - /** - *

      The name of the profile object type.

      - */ - ObjectTypeName: string | undefined; - +export interface ListDomainItem { /** - *

      Description of the profile object type.

      + *

      The unique name of the domain.

      */ - Description: string | undefined; + DomainName: string | undefined; /** *

      The timestamp of when the domain was created.

      */ - CreatedAt?: Date; + CreatedAt: Date | undefined; /** *

      The timestamp of when the domain was most recently edited.

      */ - LastUpdatedAt?: Date; + LastUpdatedAt: Date | undefined; /** *

      The tags used to organize, track, or control access for this resource.

      @@ -2438,752 +2991,812 @@ export interface ListProfileObjectTypeItem { Tags?: { [key: string]: string }; } -export namespace ListProfileObjectTypeItem { +export namespace ListDomainItem { /** * @internal */ - export const filterSensitiveLog = (obj: ListProfileObjectTypeItem): any => ({ + export const filterSensitiveLog = (obj: ListDomainItem): any => ({ ...obj, }); } -export interface ListProfileObjectTypesResponse { +export interface ListDomainsResponse { /** - *

      The list of ListProfileObjectTypes instances.

      + *

      The list of ListDomains instances.

      */ - Items?: ListProfileObjectTypeItem[]; + Items?: ListDomainItem[]; /** - *

      Identifies the next page of results to return.

      + *

      The pagination token from the previous ListDomains API call.

      */ NextToken?: string; } -export namespace ListProfileObjectTypesResponse { +export namespace ListDomainsResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ListProfileObjectTypesResponse): any => ({ + export const filterSensitiveLog = (obj: ListDomainsResponse): any => ({ ...obj, }); } -export interface ListProfileObjectTypeTemplatesRequest { +export interface ListIdentityResolutionJobsRequest { /** - *

      The pagination token from the previous ListObjectTypeTemplates API call.

      + *

      The unique name of the domain.

      + */ + DomainName: string | undefined; + + /** + *

      The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

      */ NextToken?: string; /** - *

      The maximum number of objects returned per page.

      + *

      The maximum number of results to return per page.

      */ MaxResults?: number; } -export namespace ListProfileObjectTypeTemplatesRequest { +export namespace ListIdentityResolutionJobsRequest { /** * @internal */ - export const filterSensitiveLog = (obj: ListProfileObjectTypeTemplatesRequest): any => ({ + export const filterSensitiveLog = (obj: ListIdentityResolutionJobsRequest): any => ({ ...obj, }); } /** - *

      A ProfileObjectTypeTemplate in a list of ProfileObjectTypeTemplates.

      + *

      Information about the Identity Resolution Job.

      */ -export interface ListProfileObjectTypeTemplateItem { +export interface IdentityResolutionJob { /** - *

      A unique identifier for the object template.

      + *

      The unique name of the domain.

      */ - TemplateId?: string; + DomainName?: string; /** - *

      The name of the source of the object template.

      + *

      The unique identifier of the Identity Resolution Job.

      */ - SourceName?: string; + JobId?: string; /** - *

      The source of the object template.

      + *

      The status of the Identity Resolution Job.

      • PENDING: The Identity Resolution Job is scheduled but has not started yet. If you turn off the Identity Resolution feature in your domain, jobs in the PENDING state are deleted.

      • PREPROCESSING: The Identity Resolution Job is loading your data.

      • FIND_MATCHING: The Identity Resolution Job is using the machine learning model to identify profiles that belong to the same matching group.

      • MERGING: The Identity Resolution Job is merging duplicate profiles.

      • COMPLETED: The Identity Resolution Job completed successfully.

      • PARTIAL_SUCCESS: There's a system error and not all of the data is merged. The Identity Resolution Job writes a message indicating the source of the problem.

      • FAILED: The Identity Resolution Job did not merge any data. It writes a message indicating the source of the problem.
      */ - SourceObject?: string; -} + Status?: IdentityResolutionJobStatus | string; -export namespace ListProfileObjectTypeTemplateItem { /** - * @internal + *

      The timestamp of when the job was started or will be started.

      */ - export const filterSensitiveLog = (obj: ListProfileObjectTypeTemplateItem): any => ({ - ...obj, - }); -} + JobStartTime?: Date; -export interface ListProfileObjectTypeTemplatesResponse { /** - *

      The list of ListProfileObjectType template instances.

      + *

      The timestamp of when the job was completed.

      */ - Items?: ListProfileObjectTypeTemplateItem[]; + JobEndTime?: Date; /** - *

      The pagination token from the previous ListObjectTypeTemplates API call.

      + *

      Statistics about an Identity Resolution Job.

      */ - NextToken?: string; -} + JobStats?: JobStats; -export namespace ListProfileObjectTypeTemplatesResponse { /** - * @internal + *

      The S3 location where the Identity Resolution Job writes result files.

      */ - export const filterSensitiveLog = (obj: ListProfileObjectTypeTemplatesResponse): any => ({ - ...obj, - }); -} + ExportingLocation?: ExportingLocation; -export interface ListTagsForResourceRequest { /** - *

      The ARN of the resource for which you want to view tags.

      + *

      The error messages that are generated when the Identity Resolution Job runs.

      */ - resourceArn: string | undefined; + Message?: string; } -export namespace ListTagsForResourceRequest { +export namespace IdentityResolutionJob { /** * @internal */ - export const filterSensitiveLog = (obj: ListTagsForResourceRequest): any => ({ + export const filterSensitiveLog = (obj: IdentityResolutionJob): any => ({ ...obj, }); } -export interface ListTagsForResourceResponse { +export interface ListIdentityResolutionJobsResponse { /** - *

      The tags used to organize, track, or control access for this resource.

      + *

      A list of Identity Resolution Jobs.

      */ - tags?: { [key: string]: string }; + IdentityResolutionJobsList?: IdentityResolutionJob[]; + + /** + *

      If there are additional results, this is the token for the next set of results.

      + */ + NextToken?: string; } -export namespace ListTagsForResourceResponse { +export namespace ListIdentityResolutionJobsResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ListTagsForResourceResponse): any => ({ + export const filterSensitiveLog = (obj: ListIdentityResolutionJobsResponse): any => ({ ...obj, }); } -/** - *

      A duplicate customer profile that is to be merged into a main profile.

      - */ -export interface FieldSourceProfileIds { +export interface ListIntegrationsRequest { /** - *

      A unique identifier for the account number field to be merged.

      + *

      The unique name of the domain.

      */ - AccountNumber?: string; + DomainName: string | undefined; /** - *

      A unique identifier for the additional information field to be merged.

      + *

      The pagination token from the previous ListIntegrations API call.

      */ - AdditionalInformation?: string; + NextToken?: string; /** - *

      A unique identifier for the party type field to be merged.

      + *

      The maximum number of objects returned per page.

      */ - PartyType?: string; + MaxResults?: number; /** - *

      A unique identifier for the business name field to be merged.

      + *

      Boolean to indicate if hidden integrations should be returned. Defaults to False.

      */ - BusinessName?: string; + IncludeHidden?: boolean; +} + +export namespace ListIntegrationsRequest { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ListIntegrationsRequest): any => ({ + ...obj, + }); +} +export interface ListIntegrationsResponse { /** - *

      A unique identifier for the first name field to be merged.

      + *

      The list of ListIntegrations instances.

      */ - FirstName?: string; + Items?: ListIntegrationItem[]; /** - *

      A unique identifier for the middle name field to be merged.

      + *

      The pagination token from the previous ListIntegrations API call.

      */ - MiddleName?: string; + NextToken?: string; +} +export namespace ListIntegrationsResponse { /** - *

      A unique identifier for the last name field to be merged.

      + * @internal */ - LastName?: string; + export const filterSensitiveLog = (obj: ListIntegrationsResponse): any => ({ + ...obj, + }); +} + +/** + *

      The filter applied to ListProfileObjects response to include profile objects with the + * specified index values. This filter is only supported for ObjectTypeName _asset, _case and + * _order.

      + */ +export interface ObjectFilter { + /** + *

      A searchable identifier of a standard profile object. The predefined keys you can use to + * search for _asset include: _assetId, _assetName, _serialNumber. The predefined keys you can + * use to search for _case include: _caseId. The predefined keys you can use to search for + * _order include: _orderId.

      + */ + KeyName: string | undefined; /** - *

      A unique identifier for the birthdate field to be merged.

      + *

      A list of key values.

      */ - BirthDate?: string; + Values: string[] | undefined; +} +export namespace ObjectFilter { /** - *

      A unique identifier for the gender field to be merged.

      + * @internal */ - Gender?: string; + export const filterSensitiveLog = (obj: ObjectFilter): any => ({ + ...obj, + }); +} +export interface ListProfileObjectsRequest { /** - *

      A unique identifier for the phone number field to be merged.

      + *

      The pagination token from the previous call to ListProfileObjects.

      */ - PhoneNumber?: string; + NextToken?: string; /** - *

      A unique identifier for the mobile phone number field to be merged.

      + *

      The maximum number of objects returned per page.

      + */ + MaxResults?: number; + + /** + *

      The unique name of the domain.

      */ - MobilePhoneNumber?: string; + DomainName: string | undefined; /** - *

      A unique identifier for the home phone number field to be merged.

      + *

      The name of the profile object type.

      */ - HomePhoneNumber?: string; + ObjectTypeName: string | undefined; /** - *

      A unique identifier for the business phone number field to be merged.

      + *

      The unique identifier of a customer profile.

      */ - BusinessPhoneNumber?: string; + ProfileId: string | undefined; /** - *

      A unique identifier for the email address field to be merged.

      + *

      Applies a filter to the response to include profile objects with the specified index + * values. This filter is only supported for ObjectTypeName _asset, _case and _order.
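
As a hedged illustration of the ObjectFilter described here (not part of this diff, and assuming the package exposes the matching ListProfileObjectsCommand): the domain name, region, and _caseId value are placeholders.

import { CustomerProfilesClient, ListProfileObjectsCommand } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function listCaseObjects(profileId: string, caseId: string) {
  const { Items } = await client.send(
    new ListProfileObjectsCommand({
      DomainName: "my-domain",
      ObjectTypeName: "_case",
      ProfileId: profileId,
      // Only _asset, _case, and _order support ObjectFilter; _case objects are keyed by _caseId.
      ObjectFilter: { KeyName: "_caseId", Values: [caseId] },
    })
  );
  // Each item carries the raw profile object as a JSON string in Object.
  return (Items ?? []).map((item) => item.Object);
}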

      */ - EmailAddress?: string; + ObjectFilter?: ObjectFilter; +} +export namespace ListProfileObjectsRequest { /** - *

      A unique identifier for the personal email address field to be merged.

      + * @internal */ - PersonalEmailAddress?: string; + export const filterSensitiveLog = (obj: ListProfileObjectsRequest): any => ({ + ...obj, + }); +} +/** + *

      A ProfileObject in a list of ProfileObjects.

      + */ +export interface ListProfileObjectsItem { /** - *

      A unique identifier for the business email address field to be merged.

      + *

      Specifies the kind of object being added to a profile, such as + * "Salesforce-Account."

      */ - BusinessEmailAddress?: string; + ObjectTypeName?: string; /** - *

      A unique identifier for the address field to be merged.

      + *

      The unique identifier of the ProfileObject generated by the service.

      */ - Address?: string; + ProfileObjectUniqueKey?: string; /** - *

      A unique identifier for the shipping address field to be merged.

      + *

      A JSON representation of a ProfileObject that belongs to a profile.

      */ - ShippingAddress?: string; + Object?: string; +} +export namespace ListProfileObjectsItem { /** - *

      A unique identifier for the mailing address field to be merged.

      + * @internal */ - MailingAddress?: string; + export const filterSensitiveLog = (obj: ListProfileObjectsItem): any => ({ + ...obj, + }); +} +export interface ListProfileObjectsResponse { /** - *

      A unique identifier for the billing type field to be merged.

      + *

      The list of ListProfileObject instances.

      */ - BillingAddress?: string; + Items?: ListProfileObjectsItem[]; /** - *

      A unique identifier for the attributes field to be merged.

      + *

      The pagination token from the previous call to ListProfileObjects.

      */ - Attributes?: { [key: string]: string }; + NextToken?: string; } -export namespace FieldSourceProfileIds { +export namespace ListProfileObjectsResponse { /** * @internal */ - export const filterSensitiveLog = (obj: FieldSourceProfileIds): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectsResponse): any => ({ ...obj, }); } -export interface MergeProfilesRequest { +export interface ListProfileObjectTypesRequest { /** *

      The unique name of the domain.

      */ DomainName: string | undefined; /** - *

      The identifier of the profile to be taken.

      - */ - MainProfileId: string | undefined; - - /** - *

      The identifiers of the profiles to be merged into MainProfileId.

      + *

      Identifies the next page of results to return.

      */ - ProfileIdsToBeMerged: string[] | undefined; + NextToken?: string; /** - *

      The identifiers of the fields in the profile that have the information you want to apply to the merge. For example, say you want to merge EmailAddress from Profile1 into MainProfile. This would be the identifier of the EmailAddress field in Profile1.
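
A hedged sketch of the MergeProfiles request this field belongs to (not part of this diff); the domain name, profile IDs, and region are placeholders.

import { CustomerProfilesClient, MergeProfilesCommand } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function mergeDuplicate(mainProfileId: string, duplicateProfileId: string) {
  const { Message } = await client.send(
    new MergeProfilesCommand({
      DomainName: "my-domain",
      MainProfileId: mainProfileId,
      ProfileIdsToBeMerged: [duplicateProfileId],
      // Keep most fields from the main profile, but take EmailAddress from the duplicate.
      FieldSourceProfileIds: { EmailAddress: duplicateProfileId },
    })
  );
  return Message;
}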

      + *

      The maximum number of objects returned per page.

      */ - FieldSourceProfileIds?: FieldSourceProfileIds; + MaxResults?: number; } -export namespace MergeProfilesRequest { +export namespace ListProfileObjectTypesRequest { /** * @internal */ - export const filterSensitiveLog = (obj: MergeProfilesRequest): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectTypesRequest): any => ({ ...obj, }); } -export interface MergeProfilesResponse { +/** + *

      A ProfileObjectType instance.

      + */ +export interface ListProfileObjectTypeItem { /** - *

      A message that indicates the merge request is complete.

      + *

      The name of the profile object type.

      */ - Message?: string; + ObjectTypeName: string | undefined; + + /** + *

      Description of the profile object type.

      + */ + Description: string | undefined; + + /** + *

      The timestamp of when the domain was created.

      + */ + CreatedAt?: Date; + + /** + *

      The timestamp of when the domain was most recently edited.

      + */ + LastUpdatedAt?: Date; + + /** + *

      The tags used to organize, track, or control access for this resource.

      + */ + Tags?: { [key: string]: string }; } -export namespace MergeProfilesResponse { +export namespace ListProfileObjectTypeItem { /** * @internal */ - export const filterSensitiveLog = (obj: MergeProfilesResponse): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectTypeItem): any => ({ ...obj, }); } -export enum SourceConnectorType { - MARKETO = "Marketo", - S3 = "S3", - SALESFORCE = "Salesforce", - SERVICENOW = "Servicenow", - ZENDESK = "Zendesk", -} +export interface ListProfileObjectTypesResponse { + /** + *

      The list of ListProfileObjectTypes instances.

      + */ + Items?: ListProfileObjectTypeItem[]; -/** - *

      Specifies the configuration used when importing incremental records from the - * source.

      - */ -export interface IncrementalPullConfig { /** - *

      A field that specifies the date time or timestamp field as the criteria to use when - * importing incremental records from the source.

      + *

      Identifies the next page of results to return.

      */ - DatetimeTypeFieldName?: string; + NextToken?: string; } -export namespace IncrementalPullConfig { +export namespace ListProfileObjectTypesResponse { /** * @internal */ - export const filterSensitiveLog = (obj: IncrementalPullConfig): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectTypesResponse): any => ({ ...obj, }); } -/** - *

      The properties that are applied when Marketo is being used as a source.

      - */ -export interface MarketoSourceProperties { +export interface ListProfileObjectTypeTemplatesRequest { /** - *

      The object specified in the Marketo flow source.

      + *

      The pagination token from the previous ListObjectTypeTemplates API call.

      */ - Object: string | undefined; + NextToken?: string; + + /** + *

      The maximum number of objects returned per page.

      + */ + MaxResults?: number; } -export namespace MarketoSourceProperties { +export namespace ListProfileObjectTypeTemplatesRequest { /** * @internal */ - export const filterSensitiveLog = (obj: MarketoSourceProperties): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectTypeTemplatesRequest): any => ({ ...obj, }); } /** - *

      The properties that are applied when Amazon S3 is being used as the flow source.

      + *

      A ProfileObjectTypeTemplate in a list of ProfileObjectTypeTemplates.

      */ -export interface S3SourceProperties { +export interface ListProfileObjectTypeTemplateItem { /** - *

      The Amazon S3 bucket name where the source files are stored.

      + *

      A unique identifier for the object template.

      */ - BucketName: string | undefined; + TemplateId?: string; /** - *

      The object key for the Amazon S3 bucket in which the source files are stored.

      + *

      The name of the source of the object template.

      */ - BucketPrefix?: string; + SourceName?: string; + + /** + *

      The source of the object template.

      + */ + SourceObject?: string; } -export namespace S3SourceProperties { +export namespace ListProfileObjectTypeTemplateItem { /** * @internal */ - export const filterSensitiveLog = (obj: S3SourceProperties): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectTypeTemplateItem): any => ({ ...obj, }); } -/** - *

      The properties that are applied when Salesforce is being used as a source.

      - */ -export interface SalesforceSourceProperties { - /** - *

      The object specified in the Salesforce flow source.

      - */ - Object: string | undefined; - +export interface ListProfileObjectTypeTemplatesResponse { /** - *

      The flag that enables dynamic fetching of new (recently added) fields in the Salesforce - * objects while running a flow.

      + *

      The list of ListProfileObjectType template instances.

      */ - EnableDynamicFieldUpdate?: boolean; + Items?: ListProfileObjectTypeTemplateItem[]; /** - *

      Indicates whether Amazon AppFlow includes deleted files in the flow run.

      + *

      The pagination token from the previous ListObjectTypeTemplates API call.

      */ - IncludeDeletedRecords?: boolean; + NextToken?: string; } -export namespace SalesforceSourceProperties { +export namespace ListProfileObjectTypeTemplatesResponse { /** * @internal */ - export const filterSensitiveLog = (obj: SalesforceSourceProperties): any => ({ + export const filterSensitiveLog = (obj: ListProfileObjectTypeTemplatesResponse): any => ({ ...obj, }); } -/** - *

      The properties that are applied when ServiceNow is being used as a source.

      - */ -export interface ServiceNowSourceProperties { +export interface ListTagsForResourceRequest { /** - *

      The object specified in the ServiceNow flow source.

      + *

      The ARN of the resource for which you want to view tags.

      */ - Object: string | undefined; + resourceArn: string | undefined; } -export namespace ServiceNowSourceProperties { +export namespace ListTagsForResourceRequest { /** * @internal */ - export const filterSensitiveLog = (obj: ServiceNowSourceProperties): any => ({ + export const filterSensitiveLog = (obj: ListTagsForResourceRequest): any => ({ ...obj, }); } -/** - *

      The properties that are applied when using Zendesk as a flow source.

      - */ -export interface ZendeskSourceProperties { +export interface ListTagsForResourceResponse { /** - *

      The object specified in the Zendesk flow source.

      + *

      The tags used to organize, track, or control access for this resource.

      */ - Object: string | undefined; + tags?: { [key: string]: string }; } -export namespace ZendeskSourceProperties { +export namespace ListTagsForResourceResponse { /** * @internal */ - export const filterSensitiveLog = (obj: ZendeskSourceProperties): any => ({ + export const filterSensitiveLog = (obj: ListTagsForResourceResponse): any => ({ ...obj, }); } -/** - *

      Specifies the information that is required to query a particular Amazon AppFlow connector. - * Customer Profiles supports Salesforce, Zendesk, Marketo, ServiceNow and Amazon S3.

      - */ -export interface SourceConnectorProperties { +export interface ListWorkflowsRequest { /** - *

      The properties that are applied when Marketo is being used as a source.

      + *

      The unique name of the domain.

      */ - Marketo?: MarketoSourceProperties; + DomainName: string | undefined; /** - *

      The properties that are applied when Amazon S3 is being used as the flow source.

      + *

      The type of workflow. The only supported value is APPFLOW_INTEGRATION.

      */ - S3?: S3SourceProperties; + WorkflowType?: WorkflowType | string; /** - *

      The properties that are applied when Salesforce is being used as a source.

      + *

      Status of workflow execution.

      */ - Salesforce?: SalesforceSourceProperties; + Status?: Status | string; /** - *

      The properties that are applied when ServiceNow is being used as a source.

      + *

      Retrieve workflows started after the specified timestamp.

      */ - ServiceNow?: ServiceNowSourceProperties; + QueryStartDate?: Date; + + /** + *

      Retrieve workflows that ended after the specified timestamp.

      + */ + QueryEndDate?: Date; + + /** + *

      The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

      + */ + NextToken?: string; /** - *

      The properties that are applied when using Zendesk as a flow source.

      + *

      The maximum number of results to return per page.

      */ - Zendesk?: ZendeskSourceProperties; + MaxResults?: number; } -export namespace SourceConnectorProperties { +export namespace ListWorkflowsRequest { /** * @internal */ - export const filterSensitiveLog = (obj: SourceConnectorProperties): any => ({ + export const filterSensitiveLog = (obj: ListWorkflowsRequest): any => ({ ...obj, }); } /** - *

      Contains information about the configuration of the source connector used in the - * flow.

      + *

      A workflow in a list of workflows.

      */ -export interface SourceFlowConfig { +export interface ListWorkflowsItem { /** - *

      The name of the AppFlow connector profile. This name must be unique for each connector - * profile in the AWS account.

      + *

      The type of workflow. The only supported value is APPFLOW_INTEGRATION.

      */ - ConnectorProfileName?: string; + WorkflowType: WorkflowType | string | undefined; /** - *

      The type of connector, such as Salesforce, Marketo, and so on.

      + *

      Unique identifier for the workflow.

      */ - ConnectorType: SourceConnectorType | string | undefined; + WorkflowId: string | undefined; /** - *

      Defines the configuration for a scheduled incremental data pull. If a valid - * configuration is provided, the fields specified in the configuration are used when querying - * for the incremental data pull.

      + *

      Status of workflow execution.

      */ - IncrementalPullConfig?: IncrementalPullConfig; + Status: Status | string | undefined; /** - *

      Specifies the information that is required to query a particular source - * connector.

      + *

      Description for workflow execution status.

      */ - SourceConnectorProperties: SourceConnectorProperties | undefined; + StatusDescription: string | undefined; + + /** + *

      Creation timestamp for workflow.

      + */ + CreatedAt: Date | undefined; + + /** + *

      Last updated timestamp for workflow.

      + */ + LastUpdatedAt: Date | undefined; } -export namespace SourceFlowConfig { +export namespace ListWorkflowsItem { /** * @internal */ - export const filterSensitiveLog = (obj: SourceFlowConfig): any => ({ + export const filterSensitiveLog = (obj: ListWorkflowsItem): any => ({ ...obj, }); } -export enum OperatorPropertiesKeys { - CONCAT_FORMAT = "CONCAT_FORMAT", - DATA_TYPE = "DATA_TYPE", - DESTINATION_DATA_TYPE = "DESTINATION_DATA_TYPE", - LOWER_BOUND = "LOWER_BOUND", - MASK_LENGTH = "MASK_LENGTH", - MASK_VALUE = "MASK_VALUE", - MATH_OPERATION_FIELDS_ORDER = "MATH_OPERATION_FIELDS_ORDER", - SOURCE_DATA_TYPE = "SOURCE_DATA_TYPE", - SUBFIELD_CATEGORY_MAP = "SUBFIELD_CATEGORY_MAP", - TRUNCATE_LENGTH = "TRUNCATE_LENGTH", - UPPER_BOUND = "UPPER_BOUND", - VALIDATION_ACTION = "VALIDATION_ACTION", - VALUE = "VALUE", - VALUES = "VALUES", +export interface ListWorkflowsResponse { + /** + *

      List containing workflow details.

      + */ + Items?: ListWorkflowsItem[]; + + /** + *

      If there are additional results, this is the token for the next set of results.

      + */ + NextToken?: string; } -export enum TaskType { - ARITHMETIC = "Arithmetic", - FILTER = "Filter", - MAP = "Map", - MASK = "Mask", - MERGE = "Merge", - TRUNCATE = "Truncate", - VALIDATE = "Validate", +export namespace ListWorkflowsResponse { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ListWorkflowsResponse): any => ({ + ...obj, + }); } /** - *
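
A hedged sketch (not from this diff) of ListWorkflowsRequest and ListWorkflowsResponse as defined above: list the APPFLOW_INTEGRATION workflows still in progress for a domain. The domain name and region are placeholders.

import {
  CustomerProfilesClient,
  ListWorkflowsCommand,
  Status,
  WorkflowType,
} from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" });

export async function listRunningWorkflows() {
  const { Items } = await client.send(
    new ListWorkflowsCommand({
      DomainName: "my-domain",
      WorkflowType: WorkflowType.APPFLOW_INTEGRATION,
      Status: Status.IN_PROGRESS,
      MaxResults: 25,
    })
  );
  for (const workflow of Items ?? []) {
    console.log(workflow.WorkflowId, workflow.Status, workflow.StatusDescription);
  }
  return Items ?? [];
}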

      A class for modeling different type of tasks. Task implementation varies based on the - * TaskType.

      + *

      A duplicate customer profile that is to be merged into a main profile.

      */ -export interface Task { +export interface FieldSourceProfileIds { /** - *

      The operation to be performed on the provided source fields.

      + *

      A unique identifier for the account number field to be merged.

      */ - ConnectorOperator?: ConnectorOperator; + AccountNumber?: string; /** - *

      A field in a destination connector, or a field value against which Amazon AppFlow validates a - * source field.

      + *

      A unique identifier for the additional information field to be merged.

      */ - DestinationField?: string; + AdditionalInformation?: string; /** - *

      The source fields to which a particular task is applied.

      + *

      A unique identifier for the party type field to be merged.

      */ - SourceFields: string[] | undefined; + PartyType?: string; /** - *

      A map used to store task-related information. The service looks for particular - * information based on the TaskType.

      + *

      A unique identifier for the business name field to be merged.

      */ - TaskProperties?: { [key: string]: string }; + BusinessName?: string; /** - *

      Specifies the particular task implementation that Amazon AppFlow performs.

      + *

      A unique identifier for the first name field to be merged.

      */ - TaskType: TaskType | string | undefined; -} + FirstName?: string; -export namespace Task { /** - * @internal + *

      A unique identifier for the middle name field to be merged.

      */ - export const filterSensitiveLog = (obj: Task): any => ({ - ...obj, - }); -} + MiddleName?: string; -export enum DataPullMode { - COMPLETE = "Complete", - INCREMENTAL = "Incremental", -} + /** + *

      A unique identifier for the last name field to be merged.

      + */ + LastName?: string; -/** - *

      Specifies the configuration details of a scheduled-trigger flow that you define. - * Currently, these settings only apply to the scheduled-trigger type.

      - */ -export interface ScheduledTriggerProperties { /** - *

      The scheduling expression that determines the rate at which the schedule will run, for - * example rate (5 minutes).

      + *

      A unique identifier for the birthdate field to be merged.

      */ - ScheduleExpression: string | undefined; + BirthDate?: string; /** - *

      Specifies whether a scheduled flow has an incremental data transfer or a complete data - * transfer for each flow run.

      + *

      A unique identifier for the gender field to be merged.

      */ - DataPullMode?: DataPullMode | string; + Gender?: string; /** - *

      Specifies the scheduled start time for a scheduled-trigger flow.

      + *

      A unique identifier for the phone number field to be merged.

      */ - ScheduleStartTime?: Date; + PhoneNumber?: string; /** - *

      Specifies the scheduled end time for a scheduled-trigger flow.

      + *

      A unique identifier for the mobile phone number field to be merged.

      */ - ScheduleEndTime?: Date; + MobilePhoneNumber?: string; /** - *

      Specifies the time zone used when referring to the date and time of a - * scheduled-triggered flow, such as America/New_York.

      + *

      A unique identifier for the home phone number field to be merged.

      */ - Timezone?: string; + HomePhoneNumber?: string; /** - *

      Specifies the optional offset that is added to the time interval for a - * schedule-triggered flow.

      + *

      A unique identifier for the business phone number field to be merged.

      */ - ScheduleOffset?: number; + BusinessPhoneNumber?: string; /** - *

      Specifies the date range for the records to import from the connector in the first flow - * run.

      + *

      A unique identifier for the email address field to be merged.

      */ - FirstExecutionFrom?: Date; -} + EmailAddress?: string; -export namespace ScheduledTriggerProperties { /** - * @internal + *

      A unique identifier for the personal email address field to be merged.

      */ - export const filterSensitiveLog = (obj: ScheduledTriggerProperties): any => ({ - ...obj, - }); -} + PersonalEmailAddress?: string; -/** - *

      Specifies the configuration details that control the trigger for a flow. Currently, - * these settings only apply to the Scheduled trigger type.

      - */ -export interface TriggerProperties { /** - *

      Specifies the configuration details of a schedule-triggered flow that you define.

      + *

A unique identifier for the business email address field to be merged.

      */ - Scheduled?: ScheduledTriggerProperties; -} + BusinessEmailAddress?: string; -export namespace TriggerProperties { /** - * @internal + *

A unique identifier for the address field to be merged.

      */ - export const filterSensitiveLog = (obj: TriggerProperties): any => ({ - ...obj, - }); -} + Address?: string; -export enum TriggerType { - EVENT = "Event", - ONDEMAND = "OnDemand", - SCHEDULED = "Scheduled", -} + /** + *

      A unique identifier for the shipping address field to be merged.

      + */ + ShippingAddress?: string; -/** - *

      The trigger settings that determine how and when Amazon AppFlow runs the specified - * flow.

      - */ -export interface TriggerConfig { /** - *

      Specifies the type of flow trigger. It can be OnDemand, Scheduled, or Event.

      + *

      A unique identifier for the mailing address field to be merged.

      */ - TriggerType: TriggerType | string | undefined; + MailingAddress?: string; /** - *

      Specifies the configuration details of a schedule-triggered flow that you define. - * Currently, these settings only apply to the Scheduled trigger type.

      + *

      A unique identifier for the billing type field to be merged.

      */ - TriggerProperties?: TriggerProperties; + BillingAddress?: string; + + /** + *

      A unique identifier for the attributes field to be merged.

      + */ + Attributes?: { [key: string]: string }; } -export namespace TriggerConfig { +export namespace FieldSourceProfileIds { /** * @internal */ - export const filterSensitiveLog = (obj: TriggerConfig): any => ({ + export const filterSensitiveLog = (obj: FieldSourceProfileIds): any => ({ ...obj, }); } -/** - *

      The configurations that control how Customer Profiles retrieves data from the source, - * Amazon AppFlow. Customer Profiles uses this information to create an AppFlow flow on behalf of - * customers.

      - */ -export interface FlowDefinition { +export interface MergeProfilesRequest { /** - *

      A description of the flow you want to create.

      + *

      The unique name of the domain.

      */ - Description?: string; + DomainName: string | undefined; /** - *

      The specified name of the flow. Use underscores (_) or hyphens (-) only. Spaces are not - * allowed.

      + *

The identifier of the profile to be kept, which the duplicate profiles are merged into.

      */ - FlowName: string | undefined; + MainProfileId: string | undefined; /** - *

      The Amazon Resource Name of the AWS Key Management Service (KMS) key you provide for encryption.

      + *

      The identifier of the profile to be merged into MainProfileId.

      */ - KmsArn: string | undefined; + ProfileIdsToBeMerged: string[] | undefined; /** - *

      The configuration that controls how Customer Profiles retrieves data from the - * source.

      + *

      The identifiers of the fields in the profile that has the information you want to apply + * to the merge. For example, say you want to merge EmailAddress from Profile1 into + * MainProfile. This would be the identifier of the EmailAddress field in Profile1.

      */ - SourceFlowConfig: SourceFlowConfig | undefined; + FieldSourceProfileIds?: FieldSourceProfileIds; +} +export namespace MergeProfilesRequest { /** - *
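Putting MergeProfilesRequest and FieldSourceProfileIds together, a minimal sketch of a merge call follows; it is not generated code, and the region, domain name, and profile IDs are placeholders. The duplicate is merged into MainProfileId, except that the email address is taken from the duplicate:

```ts
import { CustomerProfilesClient, MergeProfilesCommand } from "@aws-sdk/client-customer-profiles";

const client = new CustomerProfilesClient({ region: "us-east-1" }); // placeholder region

async function mergeDuplicate(): Promise<void> {
  const response = await client.send(
    new MergeProfilesCommand({
      DomainName: "my-domain", // placeholder domain
      MainProfileId: "profile-to-keep", // the profile that remains after the merge
      ProfileIdsToBeMerged: ["duplicate-profile"], // merged into MainProfileId
      FieldSourceProfileIds: {
        // For each field named here, the merged profile keeps the value from the
        // profile whose ID is given rather than the value from MainProfileId.
        EmailAddress: "duplicate-profile",
      },
    })
  );
  console.log(response.Message); // confirmation that the merge request completed
}
```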

      A list of tasks that Customer Profiles performs while transferring the data in the flow - * run.

      + * @internal */ - Tasks: Task[] | undefined; + export const filterSensitiveLog = (obj: MergeProfilesRequest): any => ({ + ...obj, + }); +} +export interface MergeProfilesResponse { /** - *

      The trigger settings that determine how and when the flow runs.

      + *

      A message that indicates the merge request is complete.

      */ - TriggerConfig: TriggerConfig | undefined; + Message?: string; } -export namespace FlowDefinition { +export namespace MergeProfilesResponse { /** * @internal */ - export const filterSensitiveLog = (obj: FlowDefinition): any => ({ + export const filterSensitiveLog = (obj: MergeProfilesResponse): any => ({ ...obj, }); } @@ -3269,6 +3882,11 @@ export interface PutIntegrationResponse { * ShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

      */ ObjectTypeNames?: { [key: string]: string }; + + /** + *

      Unique identifier for the workflow.

      + */ + WorkflowId?: string; } export namespace PutIntegrationResponse { diff --git a/clients/client-customer-profiles/src/protocols/Aws_restJson1.ts b/clients/client-customer-profiles/src/protocols/Aws_restJson1.ts index 0c587a6a9980..11fd68986dd4 100644 --- a/clients/client-customer-profiles/src/protocols/Aws_restJson1.ts +++ b/clients/client-customer-profiles/src/protocols/Aws_restJson1.ts @@ -20,6 +20,10 @@ import { import { AddProfileKeyCommandInput, AddProfileKeyCommandOutput } from "../commands/AddProfileKeyCommand"; import { CreateDomainCommandInput, CreateDomainCommandOutput } from "../commands/CreateDomainCommand"; +import { + CreateIntegrationWorkflowCommandInput, + CreateIntegrationWorkflowCommandOutput, +} from "../commands/CreateIntegrationWorkflowCommand"; import { CreateProfileCommandInput, CreateProfileCommandOutput } from "../commands/CreateProfileCommand"; import { DeleteDomainCommandInput, DeleteDomainCommandOutput } from "../commands/DeleteDomainCommand"; import { DeleteIntegrationCommandInput, DeleteIntegrationCommandOutput } from "../commands/DeleteIntegrationCommand"; @@ -33,6 +37,7 @@ import { DeleteProfileObjectTypeCommandInput, DeleteProfileObjectTypeCommandOutput, } from "../commands/DeleteProfileObjectTypeCommand"; +import { DeleteWorkflowCommandInput, DeleteWorkflowCommandOutput } from "../commands/DeleteWorkflowCommand"; import { GetAutoMergingPreviewCommandInput, GetAutoMergingPreviewCommandOutput, @@ -52,6 +57,8 @@ import { GetProfileObjectTypeTemplateCommandInput, GetProfileObjectTypeTemplateCommandOutput, } from "../commands/GetProfileObjectTypeTemplateCommand"; +import { GetWorkflowCommandInput, GetWorkflowCommandOutput } from "../commands/GetWorkflowCommand"; +import { GetWorkflowStepsCommandInput, GetWorkflowStepsCommandOutput } from "../commands/GetWorkflowStepsCommand"; import { ListAccountIntegrationsCommandInput, ListAccountIntegrationsCommandOutput, @@ -75,6 +82,7 @@ import { ListTagsForResourceCommandInput, ListTagsForResourceCommandOutput, } from "../commands/ListTagsForResourceCommand"; +import { ListWorkflowsCommandInput, ListWorkflowsCommandOutput } from "../commands/ListWorkflowsCommand"; import { MergeProfilesCommandInput, MergeProfilesCommandOutput } from "../commands/MergeProfilesCommand"; import { PutIntegrationCommandInput, PutIntegrationCommandOutput } from "../commands/PutIntegrationCommand"; import { PutProfileObjectCommandInput, PutProfileObjectCommandOutput } from "../commands/PutProfileObjectCommand"; @@ -91,8 +99,13 @@ import { CustomerProfilesServiceException as __BaseException } from "../models/C import { AccessDeniedException, Address, + AppflowIntegration, + AppflowIntegrationWorkflowAttributes, + AppflowIntegrationWorkflowMetrics, + AppflowIntegrationWorkflowStep, AutoMerging, BadRequestException, + Batch, ConflictResolution, ConnectorOperator, Consolidation, @@ -103,6 +116,7 @@ import { FlowDefinition, IdentityResolutionJob, IncrementalPullConfig, + IntegrationConfig, InternalServerException, JobSchedule, JobStats, @@ -111,6 +125,7 @@ import { ListProfileObjectsItem, ListProfileObjectTypeItem, ListProfileObjectTypeTemplateItem, + ListWorkflowsItem, MarketoSourceProperties, MatchingRequest, MatchingResponse, @@ -135,6 +150,9 @@ import { TriggerConfig, TriggerProperties, UpdateAddress, + WorkflowAttributes, + WorkflowMetrics, + WorkflowStepItem, ZendeskSourceProperties, } from "../models/models_0"; @@ -216,6 +234,49 @@ export const serializeAws_restJson1CreateDomainCommand = async ( }); }; +export const 
serializeAws_restJson1CreateIntegrationWorkflowCommand = async ( + input: CreateIntegrationWorkflowCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers: any = { + "content-type": "application/json", + }; + let resolvedPath = + `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + + "/domains/{DomainName}/workflows/integrations"; + if (input.DomainName !== undefined) { + const labelValue: string = input.DomainName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DomainName."); + } + resolvedPath = resolvedPath.replace("{DomainName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DomainName."); + } + let body: any; + body = JSON.stringify({ + ...(input.IntegrationConfig !== undefined && + input.IntegrationConfig !== null && { + IntegrationConfig: serializeAws_restJson1IntegrationConfig(input.IntegrationConfig, context), + }), + ...(input.ObjectTypeName !== undefined && + input.ObjectTypeName !== null && { ObjectTypeName: input.ObjectTypeName }), + ...(input.RoleArn !== undefined && input.RoleArn !== null && { RoleArn: input.RoleArn }), + ...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_restJson1TagMap(input.Tags, context) }), + ...(input.WorkflowType !== undefined && input.WorkflowType !== null && { WorkflowType: input.WorkflowType }), + }); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1CreateProfileCommand = async ( input: CreateProfileCommandInput, context: __SerdeContext @@ -499,6 +560,45 @@ export const serializeAws_restJson1DeleteProfileObjectTypeCommand = async ( }); }; +export const serializeAws_restJson1DeleteWorkflowCommand = async ( + input: DeleteWorkflowCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers: any = {}; + let resolvedPath = + `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + + "/domains/{DomainName}/workflows/{WorkflowId}"; + if (input.DomainName !== undefined) { + const labelValue: string = input.DomainName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DomainName."); + } + resolvedPath = resolvedPath.replace("{DomainName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DomainName."); + } + if (input.WorkflowId !== undefined) { + const labelValue: string = input.WorkflowId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: WorkflowId."); + } + resolvedPath = resolvedPath.replace("{WorkflowId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: WorkflowId."); + } + let body: any; + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1GetAutoMergingPreviewCommand = async ( input: GetAutoMergingPreviewCommandInput, context: __SerdeContext @@ -743,6 +843,89 @@ export const serializeAws_restJson1GetProfileObjectTypeTemplateCommand = async ( }); }; +export const serializeAws_restJson1GetWorkflowCommand = async ( + input: GetWorkflowCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers: any = {}; + let resolvedPath = + `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + + "/domains/{DomainName}/workflows/{WorkflowId}"; + if (input.DomainName !== undefined) { + const labelValue: string = input.DomainName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DomainName."); + } + resolvedPath = resolvedPath.replace("{DomainName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DomainName."); + } + if (input.WorkflowId !== undefined) { + const labelValue: string = input.WorkflowId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: WorkflowId."); + } + resolvedPath = resolvedPath.replace("{WorkflowId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: WorkflowId."); + } + let body: any; + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1GetWorkflowStepsCommand = async ( + input: GetWorkflowStepsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers: any = {}; + let resolvedPath = + `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + + "/domains/{DomainName}/workflows/{WorkflowId}/steps"; + if (input.DomainName !== undefined) { + const labelValue: string = input.DomainName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DomainName."); + } + resolvedPath = resolvedPath.replace("{DomainName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DomainName."); + } + if (input.WorkflowId !== undefined) { + const labelValue: string = input.WorkflowId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: WorkflowId."); + } + resolvedPath = resolvedPath.replace("{WorkflowId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: WorkflowId."); + } + const query: any = { + ...(input.NextToken !== undefined && { "next-token": input.NextToken }), + ...(input.MaxResults !== undefined && { "max-results": input.MaxResults.toString() }), + }; + let body: any; + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + export const serializeAws_restJson1ListAccountIntegrationsCommand = async ( input: ListAccountIntegrationsCommandInput, context: __SerdeContext @@ -755,6 +938,7 @@ export const serializeAws_restJson1ListAccountIntegrationsCommand = async ( const query: any = { ...(input.NextToken !== undefined && { "next-token": input.NextToken }), ...(input.MaxResults !== undefined && { "max-results": input.MaxResults.toString() }), + ...(input.IncludeHidden !== undefined && { "include-hidden": input.IncludeHidden.toString() }), }; let body: any; body = JSON.stringify({ @@ -851,6 +1035,7 @@ export const serializeAws_restJson1ListIntegrationsCommand = async ( const query: any = { ...(input.NextToken !== undefined && { "next-token": input.NextToken }), ...(input.MaxResults !== undefined && { "max-results": input.MaxResults.toString() }), + ...(input.IncludeHidden !== undefined && { "include-hidden": input.IncludeHidden.toString() }), }; let body: any; return new __HttpRequest({ @@ -994,6 +1179,50 @@ export const serializeAws_restJson1ListTagsForResourceCommand = async ( }); }; +export const serializeAws_restJson1ListWorkflowsCommand = async ( + input: ListWorkflowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers: any = { + "content-type": "application/json", + }; + let resolvedPath = + `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + "/domains/{DomainName}/workflows"; + if (input.DomainName !== undefined) { + const labelValue: string = input.DomainName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DomainName."); + } + resolvedPath = resolvedPath.replace("{DomainName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DomainName."); + } + const query: any = { + ...(input.NextToken !== undefined && { "next-token": input.NextToken }), + ...(input.MaxResults !== undefined && { "max-results": input.MaxResults.toString() }), + }; + let body: any; + body = JSON.stringify({ + ...(input.QueryEndDate !== undefined && + input.QueryEndDate !== null && { QueryEndDate: Math.round(input.QueryEndDate.getTime() / 1000) }), + ...(input.QueryStartDate !== undefined && + input.QueryStartDate !== null && { QueryStartDate: Math.round(input.QueryStartDate.getTime() / 1000) }), + ...(input.Status !== undefined && input.Status !== null && { Status: input.Status }), + ...(input.WorkflowType !== undefined && input.WorkflowType !== null && { WorkflowType: input.WorkflowType }), + }); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + query, + body, + }); +}; + export const serializeAws_restJson1MergeProfilesCommand = async ( input: MergeProfilesCommandInput, context: __SerdeContext @@ -1539,6 +1768,66 @@ const deserializeAws_restJson1CreateDomainCommandError = async ( } }; +export const deserializeAws_restJson1CreateIntegrationWorkflowCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateIntegrationWorkflowCommandError(output, context); + } + const contents: CreateIntegrationWorkflowCommandOutput = { + $metadata: deserializeMetadata(output), + Message: undefined, + WorkflowId: undefined, + }; + const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.Message !== undefined && data.Message !== null) { + contents.Message = __expectString(data.Message); + } + if (data.WorkflowId !== undefined && data.WorkflowId !== null) { + contents.WorkflowId = __expectString(data.WorkflowId); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateIntegrationWorkflowCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __BaseException; + let errorCode = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.customerprofiles#AccessDeniedException": + throw await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context); + case "BadRequestException": + case "com.amazonaws.customerprofiles#BadRequestException": + throw await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.customerprofiles#InternalServerException": + throw await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.customerprofiles#ResourceNotFoundException": + throw await 
deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context); + case "ThrottlingException": + case "com.amazonaws.customerprofiles#ThrottlingException": + throw await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + response = new __BaseException({ + name: parsedBody.code || parsedBody.Code || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + }); + throw __decorateServiceException(response, parsedBody); + } +}; + export const deserializeAws_restJson1CreateProfileCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -1931,6 +2220,58 @@ const deserializeAws_restJson1DeleteProfileObjectTypeCommandError = async ( } }; +export const deserializeAws_restJson1DeleteWorkflowCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteWorkflowCommandError(output, context); + } + const contents: DeleteWorkflowCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteWorkflowCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __BaseException; + let errorCode = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.customerprofiles#AccessDeniedException": + throw await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context); + case "BadRequestException": + case "com.amazonaws.customerprofiles#BadRequestException": + throw await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.customerprofiles#InternalServerException": + throw await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.customerprofiles#ResourceNotFoundException": + throw await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context); + case "ThrottlingException": + case "com.amazonaws.customerprofiles#ThrottlingException": + throw await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + response = new __BaseException({ + name: parsedBody.code || parsedBody.Code || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + }); + throw __decorateServiceException(response, parsedBody); + } +}; + export const deserializeAws_restJson1GetAutoMergingPreviewCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -2199,6 +2540,7 @@ export const deserializeAws_restJson1GetIntegrationCommand = async ( ObjectTypeNames: undefined, Tags: undefined, Uri: undefined, + WorkflowId: undefined, }; const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); if (data.CreatedAt !== undefined && data.CreatedAt !== null) { @@ -2222,6 +2564,9 @@ export const deserializeAws_restJson1GetIntegrationCommand = async ( if (data.Uri !== undefined && data.Uri !== null) { contents.Uri = __expectString(data.Uri); } + if (data.WorkflowId !== undefined && 
data.WorkflowId !== null) { + contents.WorkflowId = __expectString(data.WorkflowId); + } return Promise.resolve(contents); }; @@ -2281,22 +2626,202 @@ export const deserializeAws_restJson1GetMatchesCommand = async ( if (data.MatchGenerationDate !== undefined && data.MatchGenerationDate !== null) { contents.MatchGenerationDate = __expectNonNull(__parseEpochTimestamp(__expectNumber(data.MatchGenerationDate))); } - if (data.Matches !== undefined && data.Matches !== null) { - contents.Matches = deserializeAws_restJson1MatchesList(data.Matches, context); + if (data.Matches !== undefined && data.Matches !== null) { + contents.Matches = deserializeAws_restJson1MatchesList(data.Matches, context); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = __expectString(data.NextToken); + } + if (data.PotentialMatches !== undefined && data.PotentialMatches !== null) { + contents.PotentialMatches = __expectInt32(data.PotentialMatches); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetMatchesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __BaseException; + let errorCode = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.customerprofiles#AccessDeniedException": + throw await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context); + case "BadRequestException": + case "com.amazonaws.customerprofiles#BadRequestException": + throw await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.customerprofiles#InternalServerException": + throw await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.customerprofiles#ResourceNotFoundException": + throw await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context); + case "ThrottlingException": + case "com.amazonaws.customerprofiles#ThrottlingException": + throw await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + response = new __BaseException({ + name: parsedBody.code || parsedBody.Code || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + }); + throw __decorateServiceException(response, parsedBody); + } +}; + +export const deserializeAws_restJson1GetProfileObjectTypeCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetProfileObjectTypeCommandError(output, context); + } + const contents: GetProfileObjectTypeCommandOutput = { + $metadata: deserializeMetadata(output), + AllowProfileCreation: undefined, + CreatedAt: undefined, + Description: undefined, + EncryptionKey: undefined, + ExpirationDays: undefined, + Fields: undefined, + Keys: undefined, + LastUpdatedAt: undefined, + ObjectTypeName: undefined, + SourceLastUpdatedTimestampFormat: undefined, + Tags: undefined, + TemplateId: undefined, + }; + const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.AllowProfileCreation !== undefined && data.AllowProfileCreation !== null) 
{ + contents.AllowProfileCreation = __expectBoolean(data.AllowProfileCreation); + } + if (data.CreatedAt !== undefined && data.CreatedAt !== null) { + contents.CreatedAt = __expectNonNull(__parseEpochTimestamp(__expectNumber(data.CreatedAt))); + } + if (data.Description !== undefined && data.Description !== null) { + contents.Description = __expectString(data.Description); + } + if (data.EncryptionKey !== undefined && data.EncryptionKey !== null) { + contents.EncryptionKey = __expectString(data.EncryptionKey); + } + if (data.ExpirationDays !== undefined && data.ExpirationDays !== null) { + contents.ExpirationDays = __expectInt32(data.ExpirationDays); + } + if (data.Fields !== undefined && data.Fields !== null) { + contents.Fields = deserializeAws_restJson1FieldMap(data.Fields, context); + } + if (data.Keys !== undefined && data.Keys !== null) { + contents.Keys = deserializeAws_restJson1KeyMap(data.Keys, context); + } + if (data.LastUpdatedAt !== undefined && data.LastUpdatedAt !== null) { + contents.LastUpdatedAt = __expectNonNull(__parseEpochTimestamp(__expectNumber(data.LastUpdatedAt))); + } + if (data.ObjectTypeName !== undefined && data.ObjectTypeName !== null) { + contents.ObjectTypeName = __expectString(data.ObjectTypeName); + } + if (data.SourceLastUpdatedTimestampFormat !== undefined && data.SourceLastUpdatedTimestampFormat !== null) { + contents.SourceLastUpdatedTimestampFormat = __expectString(data.SourceLastUpdatedTimestampFormat); + } + if (data.Tags !== undefined && data.Tags !== null) { + contents.Tags = deserializeAws_restJson1TagMap(data.Tags, context); + } + if (data.TemplateId !== undefined && data.TemplateId !== null) { + contents.TemplateId = __expectString(data.TemplateId); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetProfileObjectTypeCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __BaseException; + let errorCode = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.customerprofiles#AccessDeniedException": + throw await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context); + case "BadRequestException": + case "com.amazonaws.customerprofiles#BadRequestException": + throw await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.customerprofiles#InternalServerException": + throw await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.customerprofiles#ResourceNotFoundException": + throw await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context); + case "ThrottlingException": + case "com.amazonaws.customerprofiles#ThrottlingException": + throw await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + response = new __BaseException({ + name: parsedBody.code || parsedBody.Code || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + }); + throw __decorateServiceException(response, parsedBody); + } +}; + +export const deserializeAws_restJson1GetProfileObjectTypeTemplateCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if 
(output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetProfileObjectTypeTemplateCommandError(output, context); + } + const contents: GetProfileObjectTypeTemplateCommandOutput = { + $metadata: deserializeMetadata(output), + AllowProfileCreation: undefined, + Fields: undefined, + Keys: undefined, + SourceLastUpdatedTimestampFormat: undefined, + SourceName: undefined, + SourceObject: undefined, + TemplateId: undefined, + }; + const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.AllowProfileCreation !== undefined && data.AllowProfileCreation !== null) { + contents.AllowProfileCreation = __expectBoolean(data.AllowProfileCreation); + } + if (data.Fields !== undefined && data.Fields !== null) { + contents.Fields = deserializeAws_restJson1FieldMap(data.Fields, context); + } + if (data.Keys !== undefined && data.Keys !== null) { + contents.Keys = deserializeAws_restJson1KeyMap(data.Keys, context); + } + if (data.SourceLastUpdatedTimestampFormat !== undefined && data.SourceLastUpdatedTimestampFormat !== null) { + contents.SourceLastUpdatedTimestampFormat = __expectString(data.SourceLastUpdatedTimestampFormat); + } + if (data.SourceName !== undefined && data.SourceName !== null) { + contents.SourceName = __expectString(data.SourceName); } - if (data.NextToken !== undefined && data.NextToken !== null) { - contents.NextToken = __expectString(data.NextToken); + if (data.SourceObject !== undefined && data.SourceObject !== null) { + contents.SourceObject = __expectString(data.SourceObject); } - if (data.PotentialMatches !== undefined && data.PotentialMatches !== null) { - contents.PotentialMatches = __expectInt32(data.PotentialMatches); + if (data.TemplateId !== undefined && data.TemplateId !== null) { + contents.TemplateId = __expectString(data.TemplateId); } return Promise.resolve(contents); }; -const deserializeAws_restJson1GetMatchesCommandError = async ( +const deserializeAws_restJson1GetProfileObjectTypeTemplateCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -2331,72 +2856,56 @@ const deserializeAws_restJson1GetMatchesCommandError = async ( } }; -export const deserializeAws_restJson1GetProfileObjectTypeCommand = async ( +export const deserializeAws_restJson1GetWorkflowCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return deserializeAws_restJson1GetProfileObjectTypeCommandError(output, context); + return deserializeAws_restJson1GetWorkflowCommandError(output, context); } - const contents: GetProfileObjectTypeCommandOutput = { + const contents: GetWorkflowCommandOutput = { $metadata: deserializeMetadata(output), - AllowProfileCreation: undefined, - CreatedAt: undefined, - Description: undefined, - EncryptionKey: undefined, - ExpirationDays: undefined, - Fields: undefined, - Keys: undefined, + Attributes: undefined, + ErrorDescription: undefined, LastUpdatedAt: undefined, - ObjectTypeName: undefined, - SourceLastUpdatedTimestampFormat: undefined, - Tags: undefined, - TemplateId: undefined, + Metrics: undefined, + StartDate: undefined, + Status: undefined, + WorkflowId: undefined, + WorkflowType: undefined, }; const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); - if 
(data.AllowProfileCreation !== undefined && data.AllowProfileCreation !== null) { - contents.AllowProfileCreation = __expectBoolean(data.AllowProfileCreation); - } - if (data.CreatedAt !== undefined && data.CreatedAt !== null) { - contents.CreatedAt = __expectNonNull(__parseEpochTimestamp(__expectNumber(data.CreatedAt))); - } - if (data.Description !== undefined && data.Description !== null) { - contents.Description = __expectString(data.Description); - } - if (data.EncryptionKey !== undefined && data.EncryptionKey !== null) { - contents.EncryptionKey = __expectString(data.EncryptionKey); - } - if (data.ExpirationDays !== undefined && data.ExpirationDays !== null) { - contents.ExpirationDays = __expectInt32(data.ExpirationDays); + if (data.Attributes !== undefined && data.Attributes !== null) { + contents.Attributes = deserializeAws_restJson1WorkflowAttributes(data.Attributes, context); } - if (data.Fields !== undefined && data.Fields !== null) { - contents.Fields = deserializeAws_restJson1FieldMap(data.Fields, context); - } - if (data.Keys !== undefined && data.Keys !== null) { - contents.Keys = deserializeAws_restJson1KeyMap(data.Keys, context); + if (data.ErrorDescription !== undefined && data.ErrorDescription !== null) { + contents.ErrorDescription = __expectString(data.ErrorDescription); } if (data.LastUpdatedAt !== undefined && data.LastUpdatedAt !== null) { contents.LastUpdatedAt = __expectNonNull(__parseEpochTimestamp(__expectNumber(data.LastUpdatedAt))); } - if (data.ObjectTypeName !== undefined && data.ObjectTypeName !== null) { - contents.ObjectTypeName = __expectString(data.ObjectTypeName); + if (data.Metrics !== undefined && data.Metrics !== null) { + contents.Metrics = deserializeAws_restJson1WorkflowMetrics(data.Metrics, context); } - if (data.SourceLastUpdatedTimestampFormat !== undefined && data.SourceLastUpdatedTimestampFormat !== null) { - contents.SourceLastUpdatedTimestampFormat = __expectString(data.SourceLastUpdatedTimestampFormat); + if (data.StartDate !== undefined && data.StartDate !== null) { + contents.StartDate = __expectNonNull(__parseEpochTimestamp(__expectNumber(data.StartDate))); } - if (data.Tags !== undefined && data.Tags !== null) { - contents.Tags = deserializeAws_restJson1TagMap(data.Tags, context); + if (data.Status !== undefined && data.Status !== null) { + contents.Status = __expectString(data.Status); } - if (data.TemplateId !== undefined && data.TemplateId !== null) { - contents.TemplateId = __expectString(data.TemplateId); + if (data.WorkflowId !== undefined && data.WorkflowId !== null) { + contents.WorkflowId = __expectString(data.WorkflowId); + } + if (data.WorkflowType !== undefined && data.WorkflowType !== null) { + contents.WorkflowType = __expectString(data.WorkflowType); } return Promise.resolve(contents); }; -const deserializeAws_restJson1GetProfileObjectTypeCommandError = async ( +const deserializeAws_restJson1GetWorkflowCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -2431,52 +2940,40 @@ const deserializeAws_restJson1GetProfileObjectTypeCommandError = async ( } }; -export const deserializeAws_restJson1GetProfileObjectTypeTemplateCommand = async ( +export const deserializeAws_restJson1GetWorkflowStepsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return 
deserializeAws_restJson1GetProfileObjectTypeTemplateCommandError(output, context); + return deserializeAws_restJson1GetWorkflowStepsCommandError(output, context); } - const contents: GetProfileObjectTypeTemplateCommandOutput = { + const contents: GetWorkflowStepsCommandOutput = { $metadata: deserializeMetadata(output), - AllowProfileCreation: undefined, - Fields: undefined, - Keys: undefined, - SourceLastUpdatedTimestampFormat: undefined, - SourceName: undefined, - SourceObject: undefined, - TemplateId: undefined, + Items: undefined, + NextToken: undefined, + WorkflowId: undefined, + WorkflowType: undefined, }; const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); - if (data.AllowProfileCreation !== undefined && data.AllowProfileCreation !== null) { - contents.AllowProfileCreation = __expectBoolean(data.AllowProfileCreation); - } - if (data.Fields !== undefined && data.Fields !== null) { - contents.Fields = deserializeAws_restJson1FieldMap(data.Fields, context); - } - if (data.Keys !== undefined && data.Keys !== null) { - contents.Keys = deserializeAws_restJson1KeyMap(data.Keys, context); - } - if (data.SourceLastUpdatedTimestampFormat !== undefined && data.SourceLastUpdatedTimestampFormat !== null) { - contents.SourceLastUpdatedTimestampFormat = __expectString(data.SourceLastUpdatedTimestampFormat); + if (data.Items !== undefined && data.Items !== null) { + contents.Items = deserializeAws_restJson1WorkflowStepsList(data.Items, context); } - if (data.SourceName !== undefined && data.SourceName !== null) { - contents.SourceName = __expectString(data.SourceName); + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = __expectString(data.NextToken); } - if (data.SourceObject !== undefined && data.SourceObject !== null) { - contents.SourceObject = __expectString(data.SourceObject); + if (data.WorkflowId !== undefined && data.WorkflowId !== null) { + contents.WorkflowId = __expectString(data.WorkflowId); } - if (data.TemplateId !== undefined && data.TemplateId !== null) { - contents.TemplateId = __expectString(data.TemplateId); + if (data.WorkflowType !== undefined && data.WorkflowType !== null) { + contents.WorkflowType = __expectString(data.WorkflowType); } return Promise.resolve(contents); }; -const deserializeAws_restJson1GetProfileObjectTypeTemplateCommandError = async ( +const deserializeAws_restJson1GetWorkflowStepsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -2984,6 +3481,66 @@ const deserializeAws_restJson1ListTagsForResourceCommandError = async ( } }; +export const deserializeAws_restJson1ListWorkflowsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListWorkflowsCommandError(output, context); + } + const contents: ListWorkflowsCommandOutput = { + $metadata: deserializeMetadata(output), + Items: undefined, + NextToken: undefined, + }; + const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + if (data.Items !== undefined && data.Items !== null) { + contents.Items = deserializeAws_restJson1WorkflowList(data.Items, context); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = __expectString(data.NextToken); + } + 
return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListWorkflowsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __BaseException; + let errorCode = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.customerprofiles#AccessDeniedException": + throw await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context); + case "BadRequestException": + case "com.amazonaws.customerprofiles#BadRequestException": + throw await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.customerprofiles#InternalServerException": + throw await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.customerprofiles#ResourceNotFoundException": + throw await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context); + case "ThrottlingException": + case "com.amazonaws.customerprofiles#ThrottlingException": + throw await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + response = new __BaseException({ + name: parsedBody.code || parsedBody.Code || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + }); + throw __decorateServiceException(response, parsedBody); + } +}; + export const deserializeAws_restJson1MergeProfilesCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -3053,6 +3610,7 @@ export const deserializeAws_restJson1PutIntegrationCommand = async ( ObjectTypeNames: undefined, Tags: undefined, Uri: undefined, + WorkflowId: undefined, }; const data: { [key: string]: any } = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); if (data.CreatedAt !== undefined && data.CreatedAt !== null) { @@ -3076,6 +3634,9 @@ export const deserializeAws_restJson1PutIntegrationCommand = async ( if (data.Uri !== undefined && data.Uri !== null) { contents.Uri = __expectString(data.Uri); } + if (data.WorkflowId !== undefined && data.WorkflowId !== null) { + contents.WorkflowId = __expectString(data.WorkflowId); + } return Promise.resolve(contents); }; @@ -3660,6 +4221,17 @@ const serializeAws_restJson1Address = (input: Address, context: __SerdeContext): }; }; +const serializeAws_restJson1AppflowIntegration = (input: AppflowIntegration, context: __SerdeContext): any => { + return { + ...(input.Batches !== undefined && + input.Batches !== null && { Batches: serializeAws_restJson1Batches(input.Batches, context) }), + ...(input.FlowDefinition !== undefined && + input.FlowDefinition !== null && { + FlowDefinition: serializeAws_restJson1FlowDefinition(input.FlowDefinition, context), + }), + }; +}; + const serializeAws_restJson1Attributes = (input: { [key: string]: string }, context: __SerdeContext): any => { return Object.entries(input).reduce((acc: { [key: string]: any }, [key, value]: [string, any]) => { if (value === null) { @@ -3698,6 +4270,26 @@ const serializeAws_restJson1AutoMerging = (input: AutoMerging, context: __SerdeC }; }; +const serializeAws_restJson1Batch = (input: Batch, context: __SerdeContext): any => { + return { + ...(input.EndTime !== undefined && + input.EndTime !== null && { EndTime: 
Math.round(input.EndTime.getTime() / 1000) }), + ...(input.StartTime !== undefined && + input.StartTime !== null && { StartTime: Math.round(input.StartTime.getTime() / 1000) }), + }; +}; + +const serializeAws_restJson1Batches = (input: Batch[], context: __SerdeContext): any => { + return input + .filter((e: any) => e != null) + .map((entry) => { + if (entry === null) { + return null as any; + } + return serializeAws_restJson1Batch(entry, context); + }); +}; + const serializeAws_restJson1ConflictResolution = (input: ConflictResolution, context: __SerdeContext): any => { return { ...(input.ConflictResolvingModel !== undefined && @@ -3820,6 +4412,15 @@ const serializeAws_restJson1IncrementalPullConfig = (input: IncrementalPullConfi }; }; +const serializeAws_restJson1IntegrationConfig = (input: IntegrationConfig, context: __SerdeContext): any => { + return { + ...(input.AppflowIntegration !== undefined && + input.AppflowIntegration !== null && { + AppflowIntegration: serializeAws_restJson1AppflowIntegration(input.AppflowIntegration, context), + }), + }; +}; + const serializeAws_restJson1JobSchedule = (input: JobSchedule, context: __SerdeContext): any => { return { ...(input.DayOfTheWeek !== undefined && input.DayOfTheWeek !== null && { DayOfTheWeek: input.DayOfTheWeek }), @@ -4206,6 +4807,50 @@ const deserializeAws_restJson1Address = (output: any, context: __SerdeContext): } as any; }; +const deserializeAws_restJson1AppflowIntegrationWorkflowAttributes = ( + output: any, + context: __SerdeContext +): AppflowIntegrationWorkflowAttributes => { + return { + ConnectorProfileName: __expectString(output.ConnectorProfileName), + RoleArn: __expectString(output.RoleArn), + SourceConnectorType: __expectString(output.SourceConnectorType), + } as any; +}; + +const deserializeAws_restJson1AppflowIntegrationWorkflowMetrics = ( + output: any, + context: __SerdeContext +): AppflowIntegrationWorkflowMetrics => { + return { + RecordsProcessed: __expectLong(output.RecordsProcessed), + StepsCompleted: __expectLong(output.StepsCompleted), + TotalSteps: __expectLong(output.TotalSteps), + } as any; +}; + +const deserializeAws_restJson1AppflowIntegrationWorkflowStep = ( + output: any, + context: __SerdeContext +): AppflowIntegrationWorkflowStep => { + return { + BatchRecordsEndTime: __expectString(output.BatchRecordsEndTime), + BatchRecordsStartTime: __expectString(output.BatchRecordsStartTime), + CreatedAt: + output.CreatedAt !== undefined && output.CreatedAt !== null + ? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedAt))) + : undefined, + ExecutionMessage: __expectString(output.ExecutionMessage), + FlowName: __expectString(output.FlowName), + LastUpdatedAt: + output.LastUpdatedAt !== undefined && output.LastUpdatedAt !== null + ? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdatedAt))) + : undefined, + RecordsProcessed: __expectLong(output.RecordsProcessed), + Status: __expectString(output.Status), + } as any; +}; + const deserializeAws_restJson1Attributes = (output: any, context: __SerdeContext): { [key: string]: string } => { return Object.entries(output).reduce((acc: { [key: string]: string }, [key, value]: [string, any]) => { if (value === null) { @@ -4429,6 +5074,7 @@ const deserializeAws_restJson1ListIntegrationItem = (output: any, context: __Ser ? 
deserializeAws_restJson1TagMap(output.Tags, context) : undefined, Uri: __expectString(output.Uri), + WorkflowId: __expectString(output.WorkflowId), } as any; }; @@ -4476,6 +5122,23 @@ const deserializeAws_restJson1ListProfileObjectTypeTemplateItem = ( } as any; }; +const deserializeAws_restJson1ListWorkflowsItem = (output: any, context: __SerdeContext): ListWorkflowsItem => { + return { + CreatedAt: + output.CreatedAt !== undefined && output.CreatedAt !== null + ? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.CreatedAt))) + : undefined, + LastUpdatedAt: + output.LastUpdatedAt !== undefined && output.LastUpdatedAt !== null + ? __expectNonNull(__parseEpochTimestamp(__expectNumber(output.LastUpdatedAt))) + : undefined, + Status: __expectString(output.Status), + StatusDescription: __expectString(output.StatusDescription), + WorkflowId: __expectString(output.WorkflowId), + WorkflowType: __expectString(output.WorkflowType), + } as any; +}; + const deserializeAws_restJson1MatchesList = (output: any, context: __SerdeContext): MatchItem[] => { const retVal = (output || []) .filter((e: any) => e != null) @@ -4747,6 +5410,57 @@ const deserializeAws_restJson1TagMap = (output: any, context: __SerdeContext): { }, {}); }; +const deserializeAws_restJson1WorkflowAttributes = (output: any, context: __SerdeContext): WorkflowAttributes => { + return { + AppflowIntegration: + output.AppflowIntegration !== undefined && output.AppflowIntegration !== null + ? deserializeAws_restJson1AppflowIntegrationWorkflowAttributes(output.AppflowIntegration, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1WorkflowList = (output: any, context: __SerdeContext): ListWorkflowsItem[] => { + const retVal = (output || []) + .filter((e: any) => e != null) + .map((entry: any) => { + if (entry === null) { + return null as any; + } + return deserializeAws_restJson1ListWorkflowsItem(entry, context); + }); + return retVal; +}; + +const deserializeAws_restJson1WorkflowMetrics = (output: any, context: __SerdeContext): WorkflowMetrics => { + return { + AppflowIntegration: + output.AppflowIntegration !== undefined && output.AppflowIntegration !== null + ? deserializeAws_restJson1AppflowIntegrationWorkflowMetrics(output.AppflowIntegration, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1WorkflowStepItem = (output: any, context: __SerdeContext): WorkflowStepItem => { + return { + AppflowIntegration: + output.AppflowIntegration !== undefined && output.AppflowIntegration !== null + ? deserializeAws_restJson1AppflowIntegrationWorkflowStep(output.AppflowIntegration, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1WorkflowStepsList = (output: any, context: __SerdeContext): WorkflowStepItem[] => { + const retVal = (output || []) + .filter((e: any) => e != null) + .map((entry: any) => { + if (entry === null) { + return null as any; + } + return deserializeAws_restJson1WorkflowStepItem(entry, context); + }); + return retVal; +}; + const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ httpStatusCode: output.statusCode, requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"], diff --git a/clients/client-dynamodb/src/DynamoDB.ts b/clients/client-dynamodb/src/DynamoDB.ts index 6258151ea0b5..a3b183ef193c 100644 --- a/clients/client-dynamodb/src/DynamoDB.ts +++ b/clients/client-dynamodb/src/DynamoDB.ts @@ -1306,6 +1306,16 @@ export class DynamoDB extends DynamoDBClient { /** *

      This operation allows you to perform reads and singleton writes on data stored in * DynamoDB, using PartiQL.

      + *

For PartiQL reads (SELECT statement), if the total number of processed + * items exceeds the maximum dataset size limit of 1 MB, the read stops and results are + * returned to the user as a LastEvaluatedKey value to continue the read in a + * subsequent operation. If the filter criteria in the WHERE clause do not match + * any data, the read will return an empty result set.

      + *

A single SELECT statement response can return up to the maximum number of + * items (if using the Limit parameter) or a maximum of 1 MB of data (with any + * filtering then applied to the results using the WHERE clause). If + * LastEvaluatedKey is present in the response, you need to paginate the + * result set.

      */ public executeStatement( args: ExecuteStatementCommandInput, diff --git a/clients/client-dynamodb/src/commands/ExecuteStatementCommand.ts b/clients/client-dynamodb/src/commands/ExecuteStatementCommand.ts index 3dc32baff462..8a11ebabc173 100644 --- a/clients/client-dynamodb/src/commands/ExecuteStatementCommand.ts +++ b/clients/client-dynamodb/src/commands/ExecuteStatementCommand.ts @@ -24,6 +24,16 @@ export interface ExecuteStatementCommandOutput extends ExecuteStatementOutput, _ /** *

      This operation allows you to perform reads and singleton writes on data stored in * DynamoDB, using PartiQL.

      + *

For PartiQL reads (SELECT statement), if the total number of processed + * items exceeds the maximum dataset size limit of 1 MB, the read stops and results are + * returned to the user as a LastEvaluatedKey value to continue the read in a + * subsequent operation. If the filter criteria in the WHERE clause do not match + * any data, the read will return an empty result set.

      + *

      A single SELECT statement response can return up to the maximum number of + * items (if using the Limit parameter) or a maximum of 1 MB of data (and then any + * filtering is applied to the results using the WHERE clause). If + * LastEvaluatedKey is present in the response, you need to paginate the + * result set.

      * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript diff --git a/clients/client-dynamodb/src/models/models_0.ts b/clients/client-dynamodb/src/models/models_0.ts index 9265831c2dd9..0b29dfb7aa08 100644 --- a/clients/client-dynamodb/src/models/models_0.ts +++ b/clients/client-dynamodb/src/models/models_0.ts @@ -7170,6 +7170,18 @@ export interface ExecuteStatementInput { * */ ReturnConsumedCapacity?: ReturnConsumedCapacity | string; + + /** + *

      The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, along + * with a key in LastEvaluatedKey to apply in a subsequent operation so you + * can pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation.

      + */ + Limit?: number; } export namespace ExecuteStatementInput { @@ -7545,51 +7557,6 @@ export namespace PutRequest { }); } -export interface ExecuteStatementOutput { - /** - *

      If a read operation was used, this property will contain the result of the read - * operation; a map of attribute names and their values. For the write operations this - * value will be empty.

      - */ - Items?: { [key: string]: AttributeValue }[]; - - /** - *

      If the response of a read request exceeds the response payload limit DynamoDB will set - * this value in the response. If set, you can use that this value in the subsequent - * request to get the remaining results.

      - */ - NextToken?: string; - - /** - *

      The capacity units consumed by an operation. The data returned includes the total - * provisioned throughput consumed, along with statistics for the table and any indexes - * involved in the operation. ConsumedCapacity is only returned if the request - * asked for it. For more information, see Provisioned Throughput in the Amazon DynamoDB Developer - * Guide.

      - */ - ConsumedCapacity?: ConsumedCapacity; -} - -export namespace ExecuteStatementOutput { - /** - * @internal - */ - export const filterSensitiveLog = (obj: ExecuteStatementOutput): any => ({ - ...obj, - ...(obj.Items && { - Items: obj.Items.map((item) => - Object.entries(item).reduce( - (acc: any, [key, value]: [string, AttributeValue]) => ({ - ...acc, - [key]: AttributeValue.filterSensitiveLog(value), - }), - {} - ) - ), - }), - }); -} - /** *

      Represents a set of primary keys and, for each key, the attributes to retrieve from * the table.

      @@ -9006,6 +8973,71 @@ export namespace DeleteItemOutput { }); } +export interface ExecuteStatementOutput { + /** + *

      If a read operation was used, this property contains the result of the read + * operation: a map of attribute names and their values. For write operations, this + * value is empty.

      + */ + Items?: { [key: string]: AttributeValue }[]; + + /** + *

      If the response of a read request exceeds the response payload limit, DynamoDB sets + * this value in the response. If set, you can use this value in a subsequent + * request to get the remaining results.

      + */ + NextToken?: string; + + /** + *

      The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned Throughput in the Amazon DynamoDB Developer + * Guide.

      + */ + ConsumedCapacity?: ConsumedCapacity; + + /** + *

      The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request. If LastEvaluatedKey is empty, then the "last page" of results has + * been processed and there is no more data to be retrieved. If + * LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

      + */ + LastEvaluatedKey?: { [key: string]: AttributeValue }; +} + +export namespace ExecuteStatementOutput { + /** + * @internal + */ + export const filterSensitiveLog = (obj: ExecuteStatementOutput): any => ({ + ...obj, + ...(obj.Items && { + Items: obj.Items.map((item) => + Object.entries(item).reduce( + (acc: any, [key, value]: [string, AttributeValue]) => ({ + ...acc, + [key]: AttributeValue.filterSensitiveLog(value), + }), + {} + ) + ), + }), + ...(obj.LastEvaluatedKey && { + LastEvaluatedKey: Object.entries(obj.LastEvaluatedKey).reduce( + (acc: any, [key, value]: [string, AttributeValue]) => ({ + ...acc, + [key]: AttributeValue.filterSensitiveLog(value), + }), + {} + ), + }), + }); +} + /** *

      Represents the output of a PutItem operation.

      */ diff --git a/clients/client-dynamodb/src/protocols/Aws_json1_0.ts b/clients/client-dynamodb/src/protocols/Aws_json1_0.ts index ae7271b8ebec..a4af63bcc3af 100644 --- a/clients/client-dynamodb/src/protocols/Aws_json1_0.ts +++ b/clients/client-dynamodb/src/protocols/Aws_json1_0.ts @@ -4606,6 +4606,7 @@ const serializeAws_json1_0ExecuteStatementInput = (input: ExecuteStatementInput, return { ...(input.ConsistentRead !== undefined && input.ConsistentRead !== null && { ConsistentRead: input.ConsistentRead }), + ...(input.Limit !== undefined && input.Limit !== null && { Limit: input.Limit }), ...(input.NextToken !== undefined && input.NextToken !== null && { NextToken: input.NextToken }), ...(input.Parameters !== undefined && input.Parameters !== null && { @@ -6870,6 +6871,10 @@ const deserializeAws_json1_0ExecuteStatementOutput = (output: any, context: __Se output.Items !== undefined && output.Items !== null ? deserializeAws_json1_0ItemList(output.Items, context) : undefined, + LastEvaluatedKey: + output.LastEvaluatedKey !== undefined && output.LastEvaluatedKey !== null + ? deserializeAws_json1_0Key(output.LastEvaluatedKey, context) + : undefined, NextToken: __expectString(output.NextToken), } as any; }; diff --git a/clients/client-ec2/src/endpoints.ts b/clients/client-ec2/src/endpoints.ts index a6f7e5b4c3da..24644906e903 100644 --- a/clients/client-ec2/src/endpoints.ts +++ b/clients/client-ec2/src/endpoints.ts @@ -208,7 +208,7 @@ const partitionHash: PartitionHash = { tags: [], }, { - hostname: "ec2-fips.{region}.amazonaws.com", + hostname: "ec2.{region}.amazonaws.com", tags: ["fips"], }, { diff --git a/clients/client-imagebuilder/src/Imagebuilder.ts b/clients/client-imagebuilder/src/Imagebuilder.ts index 2621675ccfbb..a8401d55ab4f 100644 --- a/clients/client-imagebuilder/src/Imagebuilder.ts +++ b/clients/client-imagebuilder/src/Imagebuilder.ts @@ -274,7 +274,17 @@ export class Imagebuilder extends ImagebuilderClient { /** *

      Creates a new component that can be used to build, validate, test, and assess your - * image.

      + * image. The component is based on a YAML document that you specify using exactly one + * of the following methods:

      + *
        + *
      • + *

        Inline, using the data property in the request body.

        + *
      • + *
      • + *

        A URL that points to a YAML document file stored in Amazon S3, using the + * uri property in the request body.

        + *
      • + *
      */ public createComponent( args: CreateComponentCommandInput, diff --git a/clients/client-imagebuilder/src/commands/CreateComponentCommand.ts b/clients/client-imagebuilder/src/commands/CreateComponentCommand.ts index f6d08970802e..7d9922129276 100644 --- a/clients/client-imagebuilder/src/commands/CreateComponentCommand.ts +++ b/clients/client-imagebuilder/src/commands/CreateComponentCommand.ts @@ -23,7 +23,17 @@ export interface CreateComponentCommandOutput extends CreateComponentResponse, _ /** *

      Creates a new component that can be used to build, validate, test, and assess your - * image.

      + * image. The component is based on a YAML document that you specify using exactly one + * of the following methods:

      + *
        + *
      • + *

        Inline, using the data property in the request body.

        + *
      • + *
      • + *

        A URL that points to a YAML document file stored in Amazon S3, using the + * uri property in the request body.

        + *
      • + *
      * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript diff --git a/clients/client-imagebuilder/src/models/models_0.ts b/clients/client-imagebuilder/src/models/models_0.ts index 494e760fbf53..18be927df318 100644 --- a/clients/client-imagebuilder/src/models/models_0.ts +++ b/clients/client-imagebuilder/src/models/models_0.ts @@ -40,11 +40,19 @@ export interface AdditionalInstanceConfiguration { /** *

      Use this property to provide commands or a command script to run when you launch * your build instance.

      + *

      The userDataOverride property replaces any commands that Image Builder might have added to ensure + * that Systems Manager is installed on your Linux build instance. If you override the user data, + * make sure that you add commands to install Systems Manager, if it is not pre-installed on your + * base image.

      * - *

      The userDataOverride property replaces any commands that Image Builder might have added to ensure - * that Systems Manager is installed on your Linux build instance. If you override the user data, - * make sure that you add commands to install Systems Manager, if it is not pre-installed on your - * base image.

      + *

      The user data is always base 64 encoded. For example, the + * following commands are encoded as IyEvYmluL2Jhc2gKbWtkaXIgLXAgL3Zhci9iYi8KdG91Y2ggL3Zhci$:

      + * + *

      + * #!/bin/bash + *

      + *

      mkdir -p /var/bb/

      + *

      touch /var

      *
      */ userDataOverride?: string; @@ -574,7 +582,7 @@ export interface Component { owner?: string; /** - *
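Because userDataOverride must be supplied base64 encoded, a small sketch of producing that value in Node.js follows; the script contents are illustrative, and the AdditionalInstanceConfiguration type is the one documented above.

```ts
import { AdditionalInstanceConfiguration } from "@aws-sdk/client-imagebuilder";

// Illustrative build-instance commands; keep Systems Manager installed if your
// base image does not already include it, as the note above explains.
const script = ["#!/bin/bash", "mkdir -p /var/example/", "touch /var/example/build-marker"].join("\n");

const instanceConfig: AdditionalInstanceConfiguration = {
  userDataOverride: Buffer.from(script, "utf8").toString("base64"),
};
```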

      The data of the component.

      + *

      Component data contains the YAML document content for the component.

      */ data?: string; @@ -1276,16 +1284,19 @@ export interface CreateComponentRequest { supportedOsVersions?: string[]; /** - *

      The data of the component. Used to specify the data inline. Either data or - * uri can be used to specify the data within the component.

      + *

      Component data contains inline YAML document content for the component. + * Alternatively, you can specify the uri of a YAML document file stored in + * Amazon S3. However, you cannot specify both properties.

      */ data?: string; /** - *

      The uri of the component. Must be an Amazon S3 URL and the requester must have permission to - * access the Amazon S3 bucket. If you use Amazon S3, you can specify component content up to your service - * quota. Either data or uri can be used to specify the data within the - * component.

      + *

      The uri of a YAML component document file. This must be an S3 URL + * (s3://bucket/key), and the requester must have permission to access + * the S3 bucket it points to. If you use Amazon S3, you can specify component content + * up to your service quota.

      + *

      Alternatively, you can specify the YAML document inline, using the component + * data property. You cannot specify both properties.
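As a usage sketch of the mutually exclusive data/uri members: the component name, version, client token, and YAML content below are placeholders, and the remaining request members are the standard CreateComponentRequest fields as I understand them.

```ts
import { ImagebuilderClient, CreateComponentCommand } from "@aws-sdk/client-imagebuilder";

const imagebuilder = new ImagebuilderClient({ region: "us-east-1" });

async function createInlineComponent() {
  return imagebuilder.send(
    new CreateComponentCommand({
      name: "example-component",              // placeholder
      semanticVersion: "1.0.0",
      platform: "Linux",
      clientToken: "unique-client-token-123", // placeholder idempotency token
      data: "name: example\ndescription: hypothetical inline YAML document\n",
      // uri: "s3://example-bucket/components/example.yaml", // use instead of `data`, never both
    })
  );
}
```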

      */ uri?: string; @@ -1546,6 +1557,106 @@ export class ResourceAlreadyExistsException extends __BaseException { } } +/** + *

      Identifies the launch template that the associated Windows AMI uses for + * launching an instance when faster launching is enabled.

      + * + *

      You can specify either the launchTemplateName or the + * launchTemplateId, but not both.

      + *
      + */ +export interface FastLaunchLaunchTemplateSpecification { + /** + *

      The ID of the launch template to use for faster launching for a Windows AMI.

      + */ + launchTemplateId?: string; + + /** + *

      The name of the launch template to use for faster launching for a Windows AMI.

      + */ + launchTemplateName?: string; + + /** + *

      The version of the launch template to use for faster launching for a Windows AMI.

      + */ + launchTemplateVersion?: string; +} + +export namespace FastLaunchLaunchTemplateSpecification { + /** + * @internal + */ + export const filterSensitiveLog = (obj: FastLaunchLaunchTemplateSpecification): any => ({ + ...obj, + }); +} + +/** + *

      Configuration settings for creating and managing pre-provisioned snapshots + * for a fast-launch enabled Windows AMI.

      + */ +export interface FastLaunchSnapshotConfiguration { + /** + *

      The number of pre-provisioned snapshots to keep on hand for a fast-launch enabled + * Windows AMI.

      + */ + targetResourceCount?: number; +} + +export namespace FastLaunchSnapshotConfiguration { + /** + * @internal + */ + export const filterSensitiveLog = (obj: FastLaunchSnapshotConfiguration): any => ({ + ...obj, + }); +} + +/** + *

      Define and configure faster launching for output Windows AMIs.

      + */ +export interface FastLaunchConfiguration { + /** + *

      A Boolean that represents the current state of faster launching for the + * Windows AMI. Set to true to start using Windows faster launching, or + * false to stop using it.

      + */ + enabled: boolean | undefined; + + /** + *

      Configuration settings for managing the number of snapshots that are + * created from pre-provisioned instances for the Windows AMI when faster + * launching is enabled.

      + */ + snapshotConfiguration?: FastLaunchSnapshotConfiguration; + + /** + *

      The maximum number of parallel instances that are launched for creating + * resources.

      + */ + maxParallelLaunches?: number; + + /** + *

      The launch template that the fast-launch enabled Windows AMI uses when it + * launches Windows instances to create pre-provisioned snapshots.

      + */ + launchTemplate?: FastLaunchLaunchTemplateSpecification; + + /** + *

      The owner account ID for the fast-launch enabled Windows AMI.

      + */ + accountId?: string; +} + +export namespace FastLaunchConfiguration { + /** + * @internal + */ + export const filterSensitiveLog = (obj: FastLaunchConfiguration): any => ({ + ...obj, + }); +} + /** *

      Identifies an Amazon EC2 launch template to use for a specific account.

      */ @@ -1672,6 +1783,11 @@ export interface Distribution { * using a file format that is compatible with your VMs in that Region.

      */ s3ExportConfiguration?: S3ExportConfiguration; + + /** + *

      The Windows faster-launching configurations to use for AMI distribution.
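A sketch of how the new fast-launch types compose inside a Distribution; the launch template ID, account ID, counts, and region are placeholders, and per the note above only one of launchTemplateId or launchTemplateName should be set.

```ts
import { Distribution, FastLaunchConfiguration } from "@aws-sdk/client-imagebuilder";

const fastLaunch: FastLaunchConfiguration = {
  enabled: true,                                  // turn Windows faster launching on
  maxParallelLaunches: 6,                         // placeholder value
  snapshotConfiguration: { targetResourceCount: 5 },
  launchTemplate: {
    launchTemplateId: "lt-0123456789abcdef0",     // placeholder; omit launchTemplateName when this is set
    launchTemplateVersion: "1",
  },
  accountId: "111122223333",                      // placeholder account
};

const distribution: Distribution = {
  region: "us-east-1",
  fastLaunchConfigurations: [fastLaunch],
};
```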

      + */ + fastLaunchConfigurations?: FastLaunchConfiguration[]; } export namespace Distribution { diff --git a/clients/client-imagebuilder/src/protocols/Aws_restJson1.ts b/clients/client-imagebuilder/src/protocols/Aws_restJson1.ts index 32d5eb76617c..4787e49fbd87 100644 --- a/clients/client-imagebuilder/src/protocols/Aws_restJson1.ts +++ b/clients/client-imagebuilder/src/protocols/Aws_restJson1.ts @@ -165,6 +165,9 @@ import { DistributionConfiguration, DistributionConfigurationSummary, EbsInstanceBlockDeviceSpecification, + FastLaunchConfiguration, + FastLaunchLaunchTemplateSpecification, + FastLaunchSnapshotConfiguration, Filter, ForbiddenException, IdempotentParameterMismatchException, @@ -5570,6 +5573,13 @@ const serializeAws_restJson1Distribution = (input: Distribution, context: __Serd context ), }), + ...(input.fastLaunchConfigurations !== undefined && + input.fastLaunchConfigurations !== null && { + fastLaunchConfigurations: serializeAws_restJson1FastLaunchConfigurationList( + input.fastLaunchConfigurations, + context + ), + }), ...(input.launchTemplateConfigurations !== undefined && input.launchTemplateConfigurations !== null && { launchTemplateConfigurations: serializeAws_restJson1LaunchTemplateConfigurationList( @@ -5620,6 +5630,67 @@ const serializeAws_restJson1EbsInstanceBlockDeviceSpecification = ( }; }; +const serializeAws_restJson1FastLaunchConfiguration = ( + input: FastLaunchConfiguration, + context: __SerdeContext +): any => { + return { + ...(input.accountId !== undefined && input.accountId !== null && { accountId: input.accountId }), + ...(input.enabled !== undefined && input.enabled !== null && { enabled: input.enabled }), + ...(input.launchTemplate !== undefined && + input.launchTemplate !== null && { + launchTemplate: serializeAws_restJson1FastLaunchLaunchTemplateSpecification(input.launchTemplate, context), + }), + ...(input.maxParallelLaunches !== undefined && + input.maxParallelLaunches !== null && { maxParallelLaunches: input.maxParallelLaunches }), + ...(input.snapshotConfiguration !== undefined && + input.snapshotConfiguration !== null && { + snapshotConfiguration: serializeAws_restJson1FastLaunchSnapshotConfiguration( + input.snapshotConfiguration, + context + ), + }), + }; +}; + +const serializeAws_restJson1FastLaunchConfigurationList = ( + input: FastLaunchConfiguration[], + context: __SerdeContext +): any => { + return input + .filter((e: any) => e != null) + .map((entry) => { + if (entry === null) { + return null as any; + } + return serializeAws_restJson1FastLaunchConfiguration(entry, context); + }); +}; + +const serializeAws_restJson1FastLaunchLaunchTemplateSpecification = ( + input: FastLaunchLaunchTemplateSpecification, + context: __SerdeContext +): any => { + return { + ...(input.launchTemplateId !== undefined && + input.launchTemplateId !== null && { launchTemplateId: input.launchTemplateId }), + ...(input.launchTemplateName !== undefined && + input.launchTemplateName !== null && { launchTemplateName: input.launchTemplateName }), + ...(input.launchTemplateVersion !== undefined && + input.launchTemplateVersion !== null && { launchTemplateVersion: input.launchTemplateVersion }), + }; +}; + +const serializeAws_restJson1FastLaunchSnapshotConfiguration = ( + input: FastLaunchSnapshotConfiguration, + context: __SerdeContext +): any => { + return { + ...(input.targetResourceCount !== undefined && + input.targetResourceCount !== null && { targetResourceCount: input.targetResourceCount }), + }; +}; + const serializeAws_restJson1Filter = (input: Filter, 
context: __SerdeContext): any => { return { ...(input.name !== undefined && input.name !== null && { name: input.name }), @@ -6302,6 +6373,10 @@ const deserializeAws_restJson1Distribution = (output: any, context: __SerdeConte output.containerDistributionConfiguration !== undefined && output.containerDistributionConfiguration !== null ? deserializeAws_restJson1ContainerDistributionConfiguration(output.containerDistributionConfiguration, context) : undefined, + fastLaunchConfigurations: + output.fastLaunchConfigurations !== undefined && output.fastLaunchConfigurations !== null + ? deserializeAws_restJson1FastLaunchConfigurationList(output.fastLaunchConfigurations, context) + : undefined, launchTemplateConfigurations: output.launchTemplateConfigurations !== undefined && output.launchTemplateConfigurations !== null ? deserializeAws_restJson1LaunchTemplateConfigurationList(output.launchTemplateConfigurations, context) @@ -6404,6 +6479,60 @@ const deserializeAws_restJson1EbsInstanceBlockDeviceSpecification = ( } as any; }; +const deserializeAws_restJson1FastLaunchConfiguration = ( + output: any, + context: __SerdeContext +): FastLaunchConfiguration => { + return { + accountId: __expectString(output.accountId), + enabled: __expectBoolean(output.enabled), + launchTemplate: + output.launchTemplate !== undefined && output.launchTemplate !== null + ? deserializeAws_restJson1FastLaunchLaunchTemplateSpecification(output.launchTemplate, context) + : undefined, + maxParallelLaunches: __expectInt32(output.maxParallelLaunches), + snapshotConfiguration: + output.snapshotConfiguration !== undefined && output.snapshotConfiguration !== null + ? deserializeAws_restJson1FastLaunchSnapshotConfiguration(output.snapshotConfiguration, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1FastLaunchConfigurationList = ( + output: any, + context: __SerdeContext +): FastLaunchConfiguration[] => { + const retVal = (output || []) + .filter((e: any) => e != null) + .map((entry: any) => { + if (entry === null) { + return null as any; + } + return deserializeAws_restJson1FastLaunchConfiguration(entry, context); + }); + return retVal; +}; + +const deserializeAws_restJson1FastLaunchLaunchTemplateSpecification = ( + output: any, + context: __SerdeContext +): FastLaunchLaunchTemplateSpecification => { + return { + launchTemplateId: __expectString(output.launchTemplateId), + launchTemplateName: __expectString(output.launchTemplateName), + launchTemplateVersion: __expectString(output.launchTemplateVersion), + } as any; +}; + +const deserializeAws_restJson1FastLaunchSnapshotConfiguration = ( + output: any, + context: __SerdeContext +): FastLaunchSnapshotConfiguration => { + return { + targetResourceCount: __expectInt32(output.targetResourceCount), + } as any; +}; + const deserializeAws_restJson1Image = (output: any, context: __SerdeContext): Image => { return { arn: __expectString(output.arn), diff --git a/clients/client-iotsecuretunneling/src/endpoints.ts b/clients/client-iotsecuretunneling/src/endpoints.ts index 3303ebfb00ac..88df6e859669 100644 --- a/clients/client-iotsecuretunneling/src/endpoints.ts +++ b/clients/client-iotsecuretunneling/src/endpoints.ts @@ -1,7 +1,64 @@ import { getRegionInfo, PartitionHash, RegionHash } from "@aws-sdk/config-resolver"; import { RegionInfoProvider, RegionInfoProviderOptions } from "@aws-sdk/types"; -const regionHash: RegionHash = {}; +const regionHash: RegionHash = { + "ca-central-1": { + variants: [ + { + hostname: "api.tunneling.iot-fips.ca-central-1.amazonaws.com", + 
tags: ["fips"], + }, + ], + }, + "us-east-1": { + variants: [ + { + hostname: "api.tunneling.iot-fips.us-east-1.amazonaws.com", + tags: ["fips"], + }, + ], + }, + "us-east-2": { + variants: [ + { + hostname: "api.tunneling.iot-fips.us-east-2.amazonaws.com", + tags: ["fips"], + }, + ], + }, + "us-gov-east-1": { + variants: [ + { + hostname: "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com", + tags: ["fips"], + }, + ], + }, + "us-gov-west-1": { + variants: [ + { + hostname: "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com", + tags: ["fips"], + }, + ], + }, + "us-west-1": { + variants: [ + { + hostname: "api.tunneling.iot-fips.us-west-1.amazonaws.com", + tags: ["fips"], + }, + ], + }, + "us-west-2": { + variants: [ + { + hostname: "api.tunneling.iot-fips.us-west-2.amazonaws.com", + tags: ["fips"], + }, + ], + }, +}; const partitionHash: PartitionHash = { aws: { @@ -22,6 +79,11 @@ const partitionHash: PartitionHash = { "eu-west-1", "eu-west-2", "eu-west-3", + "fips-ca-central-1", + "fips-us-east-1", + "fips-us-east-2", + "fips-us-west-1", + "fips-us-west-2", "me-south-1", "sa-east-1", "us-east-1", @@ -100,7 +162,7 @@ const partitionHash: PartitionHash = { ], }, "aws-us-gov": { - regions: ["us-gov-east-1", "us-gov-west-1"], + regions: ["fips-us-gov-east-1", "fips-us-gov-west-1", "us-gov-east-1", "us-gov-west-1"], regionRegex: "^us\\-gov\\-\\w+\\-\\d+$", variants: [ { diff --git a/clients/client-lambda/src/Lambda.ts b/clients/client-lambda/src/Lambda.ts index 337ccc8dbbfe..99f56099447c 100644 --- a/clients/client-lambda/src/Lambda.ts +++ b/clients/client-lambda/src/Lambda.ts @@ -2394,11 +2394,24 @@ export class Lambda extends LambdaClient { *

      Updates a Lambda function's code. If code signing is enabled for the function, the code package must be signed * by a trusted publisher. For more information, see Configuring code signing.

      * + *

      If the function's package type is Image, you must specify the code package in ImageUri as + * the URI of a + * container image + * in the Amazon ECR registry. + *

      + * + *

      If the function's package type is Zip, you must specify the deployment + * package as a .zip file + * archive. Enter the Amazon S3 bucket and key of the code .zip file location. + * You can also provide the function code inline using the ZipFile field.

      + *

      The code in the deployment package must be compatible with the target instruction set + * architecture of the function (x86-64 or arm64).

      + * *

      The function's code is locked when you publish a version. You can't modify the code of a published version, * only the unpublished version.

      * *

      For a function defined as a container image, Lambda resolves the image tag to an image digest. In Amazon ECR, if - * you update the image tag to a new image, Lambda does not automatically update the function.

      + * you update the image tag to a new image, Lambda does not automatically update the function.

      *
      */ public updateFunctionCode( diff --git a/clients/client-lambda/src/commands/UpdateFunctionCodeCommand.ts b/clients/client-lambda/src/commands/UpdateFunctionCodeCommand.ts index 18cc63c7f1d6..496d0063d0ba 100644 --- a/clients/client-lambda/src/commands/UpdateFunctionCodeCommand.ts +++ b/clients/client-lambda/src/commands/UpdateFunctionCodeCommand.ts @@ -25,11 +25,24 @@ export interface UpdateFunctionCodeCommandOutput extends FunctionConfiguration, *

      Updates a Lambda function's code. If code signing is enabled for the function, the code package must be signed * by a trusted publisher. For more information, see Configuring code signing.

      * + *

      If the function's package type is Image, you must specify the code package in ImageUri as + * the URI of a + * container image + * in the Amazon ECR registry. + *

      + * + *

      If the function's package type is Zip, you must specify the deployment + * package as a .zip file + * archive. Enter the Amazon S3 bucket and key of the code .zip file location. + * You can also provide the function code inline using the ZipFile field.

      + *

      The code in the deployment package must be compatible with the target instruction set + * architecture of the function (x86-64 or arm64).

      + * *

      The function's code is locked when you publish a version. You can't modify the code of a published version, * only the unpublished version.

      * *

      For a function defined as a container image, Lambda resolves the image tag to an image digest. In Amazon ECR, if - * you update the image tag to a new image, Lambda does not automatically update the function.

      + * you update the image tag to a new image, Lambda does not automatically update the function.

      *
      * @example * Use a bare-bones client and the command you need to make an API call. diff --git a/clients/client-lambda/src/models/models_0.ts b/clients/client-lambda/src/models/models_0.ts index c0a03ee108c6..a48142710792 100644 --- a/clients/client-lambda/src/models/models_0.ts +++ b/clients/client-lambda/src/models/models_0.ts @@ -1441,6 +1441,7 @@ export enum PackageType { } export enum Runtime { + dotnet6 = "dotnet6", dotnetcore10 = "dotnetcore1.0", dotnetcore20 = "dotnetcore2.0", dotnetcore21 = "dotnetcore2.1", @@ -5638,18 +5639,19 @@ export interface UpdateFunctionCodeRequest { FunctionName: string | undefined; /** - *

      The base64-encoded contents of the deployment package. Amazon Web Services SDK and Amazon Web Services CLI clients handle the encoding for - * you.

      + *

      The base64-encoded contents of the deployment package. Amazon Web Services SDK and Amazon Web Services CLI clients + * handle the encoding for you. Use only with a function defined with a .zip file archive deployment package.

      */ ZipFile?: Uint8Array; /** - *

      An Amazon S3 bucket in the same Amazon Web Services Region as your function. The bucket can be in a different Amazon Web Services account.

      + *

      An Amazon S3 bucket in the same Amazon Web Services Region as your function. The bucket can be in a different + * Amazon Web Services account. Use only with a function defined with a .zip file archive deployment package.

      */ S3Bucket?: string; /** - *

      The Amazon S3 key of the deployment package.

      + *

      The Amazon S3 key of the deployment package. Use only with a function defined with a .zip file archive deployment package.

      */ S3Key?: string; @@ -5659,7 +5661,8 @@ export interface UpdateFunctionCodeRequest { S3ObjectVersion?: string; /** - *

      URI of a container image in the Amazon ECR registry.

      + *

      URI of a container image in the Amazon ECR registry. Do not use for a function defined + * with a .zip file archive.
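For the .zip path described above, a minimal sketch (function name, bucket, and key are placeholders); an Image-packaged function would instead set ImageUri and omit the S3/ZipFile members.

```ts
import { LambdaClient, UpdateFunctionCodeCommand } from "@aws-sdk/client-lambda";

const lambda = new LambdaClient({ region: "us-east-1" });

async function deployZipUpdate() {
  // The archive must target the function's instruction set architecture (x86-64 or arm64).
  return lambda.send(
    new UpdateFunctionCodeCommand({
      FunctionName: "my-function",          // placeholder
      S3Bucket: "my-deployment-bucket",     // placeholder; only for .zip packages
      S3Key: "builds/my-function-v2.zip",   // placeholder
      // ImageUri: "111122223333.dkr.ecr.us-east-1.amazonaws.com/my-image:latest", // Image packages only
    })
  );
}
```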

      */ ImageUri?: string; diff --git a/clients/client-lambda/src/waiters/index.ts b/clients/client-lambda/src/waiters/index.ts index 108d52ffeab6..dcfbc1a1ef8a 100644 --- a/clients/client-lambda/src/waiters/index.ts +++ b/clients/client-lambda/src/waiters/index.ts @@ -1,3 +1,5 @@ export * from "./waitForFunctionActive"; +export * from "./waitForFunctionActiveV2"; export * from "./waitForFunctionExists"; export * from "./waitForFunctionUpdated"; +export * from "./waitForFunctionUpdatedV2"; diff --git a/clients/client-lambda/src/waiters/waitForFunctionActive.ts b/clients/client-lambda/src/waiters/waitForFunctionActive.ts index e911dbb1ac37..a6fab6ba63e7 100644 --- a/clients/client-lambda/src/waiters/waitForFunctionActive.ts +++ b/clients/client-lambda/src/waiters/waitForFunctionActive.ts @@ -41,7 +41,7 @@ const checkState = async (client: LambdaClient, input: GetFunctionConfigurationC return { state: WaiterState.RETRY, reason }; }; /** - * Waits for the function's State to be Active. + * Waits for the function's State to be Active. This waiter uses GetFunctionConfiguration API. This should be used after new function creation. * @deprecated Use waitUntilFunctionActive instead. waitForFunctionActive does not throw error in non-success cases. */ export const waitForFunctionActive = async ( @@ -52,7 +52,7 @@ export const waitForFunctionActive = async ( return createWaiter({ ...serviceDefaults, ...params }, input, checkState); }; /** - * Waits for the function's State to be Active. + * Waits for the function's State to be Active. This waiter uses GetFunctionConfiguration API. This should be used after new function creation. * @param params - Waiter configuration options. * @param input - The input to GetFunctionConfigurationCommand for polling. */ diff --git a/clients/client-lambda/src/waiters/waitForFunctionActiveV2.ts b/clients/client-lambda/src/waiters/waitForFunctionActiveV2.ts new file mode 100644 index 000000000000..8b42a9242b8e --- /dev/null +++ b/clients/client-lambda/src/waiters/waitForFunctionActiveV2.ts @@ -0,0 +1,63 @@ +import { checkExceptions, createWaiter, WaiterConfiguration, WaiterResult, WaiterState } from "@aws-sdk/util-waiter"; + +import { GetFunctionCommand, GetFunctionCommandInput } from "../commands/GetFunctionCommand"; +import { LambdaClient } from "../LambdaClient"; + +const checkState = async (client: LambdaClient, input: GetFunctionCommandInput): Promise => { + let reason; + try { + const result: any = await client.send(new GetFunctionCommand(input)); + reason = result; + try { + const returnComparator = () => { + return result.Configuration.State; + }; + if (returnComparator() === "Active") { + return { state: WaiterState.SUCCESS, reason }; + } + } catch (e) {} + try { + const returnComparator = () => { + return result.Configuration.State; + }; + if (returnComparator() === "Failed") { + return { state: WaiterState.FAILURE, reason }; + } + } catch (e) {} + try { + const returnComparator = () => { + return result.Configuration.State; + }; + if (returnComparator() === "Pending") { + return { state: WaiterState.RETRY, reason }; + } + } catch (e) {} + } catch (exception) { + reason = exception; + } + return { state: WaiterState.RETRY, reason }; +}; +/** + * Waits for the function's State to be Active. This waiter uses GetFunction API. This should be used after new function creation. + * @deprecated Use waitUntilFunctionActiveV2 instead. waitForFunctionActiveV2 does not throw error in non-success cases. 
+ */ +export const waitForFunctionActiveV2 = async ( + params: WaiterConfiguration, + input: GetFunctionCommandInput +): Promise => { + const serviceDefaults = { minDelay: 1, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +/** + * Waits for the function's State to be Active. This waiter uses GetFunction API. This should be used after new function creation. + * @param params - Waiter configuration options. + * @param input - The input to GetFunctionCommand for polling. + */ +export const waitUntilFunctionActiveV2 = async ( + params: WaiterConfiguration, + input: GetFunctionCommandInput +): Promise => { + const serviceDefaults = { minDelay: 1, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/clients/client-lambda/src/waiters/waitForFunctionUpdated.ts b/clients/client-lambda/src/waiters/waitForFunctionUpdated.ts index ff05a8ec4561..cf00104c3280 100644 --- a/clients/client-lambda/src/waiters/waitForFunctionUpdated.ts +++ b/clients/client-lambda/src/waiters/waitForFunctionUpdated.ts @@ -41,7 +41,7 @@ const checkState = async (client: LambdaClient, input: GetFunctionConfigurationC return { state: WaiterState.RETRY, reason }; }; /** - * Waits for the function's LastUpdateStatus to be Successful. + * Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunctionConfiguration API. This should be used after function updates. * @deprecated Use waitUntilFunctionUpdated instead. waitForFunctionUpdated does not throw error in non-success cases. */ export const waitForFunctionUpdated = async ( @@ -52,7 +52,7 @@ export const waitForFunctionUpdated = async ( return createWaiter({ ...serviceDefaults, ...params }, input, checkState); }; /** - * Waits for the function's LastUpdateStatus to be Successful. + * Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunctionConfiguration API. This should be used after function updates. * @param params - Waiter configuration options. * @param input - The input to GetFunctionConfigurationCommand for polling. 
*/ diff --git a/clients/client-lambda/src/waiters/waitForFunctionUpdatedV2.ts b/clients/client-lambda/src/waiters/waitForFunctionUpdatedV2.ts new file mode 100644 index 000000000000..026ea931575b --- /dev/null +++ b/clients/client-lambda/src/waiters/waitForFunctionUpdatedV2.ts @@ -0,0 +1,63 @@ +import { checkExceptions, createWaiter, WaiterConfiguration, WaiterResult, WaiterState } from "@aws-sdk/util-waiter"; + +import { GetFunctionCommand, GetFunctionCommandInput } from "../commands/GetFunctionCommand"; +import { LambdaClient } from "../LambdaClient"; + +const checkState = async (client: LambdaClient, input: GetFunctionCommandInput): Promise => { + let reason; + try { + const result: any = await client.send(new GetFunctionCommand(input)); + reason = result; + try { + const returnComparator = () => { + return result.Configuration.LastUpdateStatus; + }; + if (returnComparator() === "Successful") { + return { state: WaiterState.SUCCESS, reason }; + } + } catch (e) {} + try { + const returnComparator = () => { + return result.Configuration.LastUpdateStatus; + }; + if (returnComparator() === "Failed") { + return { state: WaiterState.FAILURE, reason }; + } + } catch (e) {} + try { + const returnComparator = () => { + return result.Configuration.LastUpdateStatus; + }; + if (returnComparator() === "InProgress") { + return { state: WaiterState.RETRY, reason }; + } + } catch (e) {} + } catch (exception) { + reason = exception; + } + return { state: WaiterState.RETRY, reason }; +}; +/** + * Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunction API. This should be used after function updates. + * @deprecated Use waitUntilFunctionUpdatedV2 instead. waitForFunctionUpdatedV2 does not throw error in non-success cases. + */ +export const waitForFunctionUpdatedV2 = async ( + params: WaiterConfiguration, + input: GetFunctionCommandInput +): Promise => { + const serviceDefaults = { minDelay: 1, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +/** + * Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunction API. This should be used after function updates. + * @param params - Waiter configuration options. + * @param input - The input to GetFunctionCommand for polling. 
+ */ +export const waitUntilFunctionUpdatedV2 = async ( + params: WaiterConfiguration, + input: GetFunctionCommandInput +): Promise => { + const serviceDefaults = { minDelay: 1, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/clients/client-textract/src/models/models_0.ts b/clients/client-textract/src/models/models_0.ts index 281acf47c762..d91a1715db91 100644 --- a/clients/client-textract/src/models/models_0.ts +++ b/clients/client-textract/src/models/models_0.ts @@ -212,13 +212,16 @@ export enum BlockType { CELL = "CELL", KEY_VALUE_SET = "KEY_VALUE_SET", LINE = "LINE", + MERGED_CELL = "MERGED_CELL", PAGE = "PAGE", SELECTION_ELEMENT = "SELECTION_ELEMENT", TABLE = "TABLE", + TITLE = "TITLE", WORD = "WORD", } export enum EntityType { + COLUMN_HEADER = "COLUMN_HEADER", KEY = "KEY", VALUE = "VALUE", } @@ -333,6 +336,8 @@ export namespace Geometry { export enum RelationshipType { CHILD = "CHILD", COMPLEX_FEATURES = "COMPLEX_FEATURES", + MERGED_CELL = "MERGED_CELL", + TITLE = "TITLE", VALUE = "VALUE", } diff --git a/clients/client-transfer/src/models/models_0.ts b/clients/client-transfer/src/models/models_0.ts index 40dfcc78daa7..2a02ce46d604 100644 --- a/clients/client-transfer/src/models/models_0.ts +++ b/clients/client-transfer/src/models/models_0.ts @@ -147,7 +147,9 @@ export interface CopyStepDetails { Name?: string; /** - *
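The V2 waiters added above poll GetFunction rather than GetFunctionConfiguration; a usage sketch follows, assuming they are re-exported from the package root like the existing waiters (function name and wait time are placeholders).

```ts
import { LambdaClient, waitUntilFunctionActiveV2, waitUntilFunctionUpdatedV2 } from "@aws-sdk/client-lambda";

const client = new LambdaClient({ region: "us-east-1" });
const input = { FunctionName: "my-function" }; // placeholder

async function waitForDeployment() {
  // After creating the function: wait until Configuration.State is "Active".
  await waitUntilFunctionActiveV2({ client, maxWaitTime: 300 }, input);

  // After updating code or configuration: wait until LastUpdateStatus is "Successful".
  await waitUntilFunctionUpdatedV2({ client, maxWaitTime: 300 }, input);
}
```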

      Specifies the location for the file being copied. Only applicable for the Copy type of workflow steps.

      + *

      Specifies the location for the file being copied. Only applicable for Copy type workflow + * steps. Use ${Transfer:username} in this field to parametrize the destination + * prefix by username.

      */ DestinationFileLocation?: InputFileLocation; @@ -156,6 +158,22 @@ export interface CopyStepDetails { * The default is FALSE.

      */ OverwriteExisting?: OverwriteExisting | string; + + /** + *

      Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file + * for the workflow.

      + *
        + *
      • + *

        Enter ${previous.file} to use the previous file as the input. + * In this case, this workflow step uses the output file from the previous workflow step as input. + * This is the default value.

        + *
      • + *
      • + *

        Enter ${original.file} to use the originally-uploaded file location as input for this step.

        + *
      • + *
      + */ + SourceFileLocation?: string; } export namespace CopyStepDetails { @@ -174,15 +192,6 @@ export namespace CopyStepDetails { *
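A sketch of a copy step that combines the new SourceFileLocation member with the ${Transfer:username} destination prefix described above. The InputFileLocation shape (S3FileLocation with Bucket/Key) and the bucket name are assumptions on my part; verify them against the client's models.

```ts
import { CopyStepDetails } from "@aws-sdk/client-transfer";

const copyStep: CopyStepDetails = {
  Name: "CopyOriginalUpload",
  SourceFileLocation: "${original.file}",     // operate on the originally uploaded file
  OverwriteExisting: "FALSE",
  DestinationFileLocation: {
    // Assumed S3 location shape; bucket and prefix are placeholders.
    S3FileLocation: {
      Bucket: "example-archive-bucket",
      Key: "archive/${Transfer:username}/",   // destination prefix parametrized by username
    },
  },
};
```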

      * [ { "Entry:": "/", "Target": "/bucket_name/home/mydirectory" } ] *

      - * - * - *

      If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is - * ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place - * holders for your directory. If using the CLI, use the s3api or efsapi call instead of - * s3 or efs so you can use the put-object operation. For example, you use the - * following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make - * sure that the end of the key name ends in a / for it to be considered a folder.

      - *
      */ export interface HomeDirectoryMapEntry { /** @@ -279,15 +288,6 @@ export interface CreateAccessRequest { *

      * [ { "Entry:": "/", "Target": "/bucket_name/home/mydirectory" } ] *

      - * - * - *

      If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is - * ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place - * holders for your directory. If using the CLI, use the s3api or efsapi call instead of - * s3 or efs so you can use the put-object operation. For example, you use the - * following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make - * sure that the end of the key name ends in a / for it to be considered a folder.

      - *
      */ HomeDirectoryMappings?: HomeDirectoryMapEntry[]; @@ -1086,15 +1086,6 @@ export interface CreateUserRequest { *

      * [ { "Entry:": "/", "Target": "/bucket_name/home/mydirectory" } ] *

      - * - * - *

      If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is - * ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place - * holders for your directory. If using the CLI, use the s3api or efsapi call instead of - * s3 or efs so you can use the put-object operation. For example, you use the - * following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make - * sure that the end of the key name ends in a / for it to be considered a folder.

      - *
      */ HomeDirectoryMappings?: HomeDirectoryMapEntry[]; @@ -1219,6 +1210,22 @@ export interface CustomStepDetails { *

      Timeout, in seconds, for the step.

      */ TimeoutSeconds?: number; + + /** + *

      Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file + * for the workflow.

      + *
        + *
      • + *

        Enter ${previous.file} to use the previous file as the input. + * In this case, this workflow step uses the output file from the previous workflow step as input. + * This is the default value.

        + *
      • + *
      • + *

        Enter ${original.file} to use the originally-uploaded file location as input for this step.

        + *
      • + *
      + */ + SourceFileLocation?: string; } export namespace CustomStepDetails { @@ -1238,6 +1245,22 @@ export interface DeleteStepDetails { *

      The name of the step, used as an identifier.

      */ Name?: string; + + /** + *

      Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file + * for the workflow.

      + *
        + *
      • + *

        Enter ${previous.file} to use the previous file as the input. + * In this case, this workflow step uses the output file from the previous workflow step as input. + * This is the default value.

        + *
      • + *
      • + *

        Enter ${original.file} to use the originally-uploaded file location as input for this step.

        + *
      • + *
      + */ + SourceFileLocation?: string; } export namespace DeleteStepDetails { @@ -1287,6 +1310,22 @@ export interface TagStepDetails { *

      Array that contains from 1 to 10 key/value pairs.

      */ Tags?: S3Tag[]; + + /** + *

      Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file + * for the workflow.

      + *
        + *
      • + *

        Enter ${previous.file} to use the previous file as the input. + * In this case, this workflow step uses the output file from the previous workflow step as input. + * This is the default value.

        + *
      • + *
      • + *

        Enter ${original.file} to use the originally-uploaded file location as input for this step.

        + *
      • + *
      + */ + SourceFileLocation?: string; } export namespace TagStepDetails { @@ -3518,6 +3557,9 @@ export interface TestIdentityProviderResponse { /** *

      A message that indicates whether the test was successful or not.

      + * + *

      If an empty string is returned, the most likely cause is that the authentication failed due to an incorrect username or password.

      + *
      */ Message?: string; @@ -3595,15 +3637,6 @@ export interface UpdateAccessRequest { *

      * [ { "Entry:": "/", "Target": "/bucket_name/home/mydirectory" } ] *

      - * - * - *

      If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is - * ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place - * holders for your directory. If using the CLI, use the s3api or efsapi call instead of - * s3 or efs so you can use the put-object operation. For example, you use the - * following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make - * sure that the end of the key name ends in a / for it to be considered a folder.

      - *
      */ HomeDirectoryMappings?: HomeDirectoryMapEntry[]; @@ -3937,15 +3970,6 @@ export interface UpdateUserRequest { *

      * [ { "Entry:": "/", "Target": "/bucket_name/home/mydirectory" } ] *

      - * - * - *

      If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is - * ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place - * holders for your directory. If using the CLI, use the s3api or efsapi call instead of - * s3 or efs so you can use the put-object operation. For example, you use the - * following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make - * sure that the end of the key name ends in a / for it to be considered a folder.

      - *
      */ HomeDirectoryMappings?: HomeDirectoryMapEntry[]; diff --git a/clients/client-transfer/src/protocols/Aws_json1_1.ts b/clients/client-transfer/src/protocols/Aws_json1_1.ts index 324b0f9c860d..d625e214d1f5 100644 --- a/clients/client-transfer/src/protocols/Aws_json1_1.ts +++ b/clients/client-transfer/src/protocols/Aws_json1_1.ts @@ -2434,6 +2434,8 @@ const serializeAws_json1_1CopyStepDetails = (input: CopyStepDetails, context: __ ...(input.Name !== undefined && input.Name !== null && { Name: input.Name }), ...(input.OverwriteExisting !== undefined && input.OverwriteExisting !== null && { OverwriteExisting: input.OverwriteExisting }), + ...(input.SourceFileLocation !== undefined && + input.SourceFileLocation !== null && { SourceFileLocation: input.SourceFileLocation }), }; }; @@ -2533,6 +2535,8 @@ const serializeAws_json1_1CreateWorkflowRequest = (input: CreateWorkflowRequest, const serializeAws_json1_1CustomStepDetails = (input: CustomStepDetails, context: __SerdeContext): any => { return { ...(input.Name !== undefined && input.Name !== null && { Name: input.Name }), + ...(input.SourceFileLocation !== undefined && + input.SourceFileLocation !== null && { SourceFileLocation: input.SourceFileLocation }), ...(input.Target !== undefined && input.Target !== null && { Target: input.Target }), ...(input.TimeoutSeconds !== undefined && input.TimeoutSeconds !== null && { TimeoutSeconds: input.TimeoutSeconds }), @@ -2567,6 +2571,8 @@ const serializeAws_json1_1DeleteSshPublicKeyRequest = ( const serializeAws_json1_1DeleteStepDetails = (input: DeleteStepDetails, context: __SerdeContext): any => { return { ...(input.Name !== undefined && input.Name !== null && { Name: input.Name }), + ...(input.SourceFileLocation !== undefined && + input.SourceFileLocation !== null && { SourceFileLocation: input.SourceFileLocation }), }; }; @@ -2927,6 +2933,8 @@ const serializeAws_json1_1Tags = (input: Tag[], context: __SerdeContext): any => const serializeAws_json1_1TagStepDetails = (input: TagStepDetails, context: __SerdeContext): any => { return { ...(input.Name !== undefined && input.Name !== null && { Name: input.Name }), + ...(input.SourceFileLocation !== undefined && + input.SourceFileLocation !== null && { SourceFileLocation: input.SourceFileLocation }), ...(input.Tags !== undefined && input.Tags !== null && { Tags: serializeAws_json1_1S3Tags(input.Tags, context) }), }; }; @@ -3106,6 +3114,7 @@ const deserializeAws_json1_1CopyStepDetails = (output: any, context: __SerdeCont : undefined, Name: __expectString(output.Name), OverwriteExisting: __expectString(output.OverwriteExisting), + SourceFileLocation: __expectString(output.SourceFileLocation), } as any; }; @@ -3138,6 +3147,7 @@ const deserializeAws_json1_1CreateWorkflowResponse = (output: any, context: __Se const deserializeAws_json1_1CustomStepDetails = (output: any, context: __SerdeContext): CustomStepDetails => { return { Name: __expectString(output.Name), + SourceFileLocation: __expectString(output.SourceFileLocation), Target: __expectString(output.Target), TimeoutSeconds: __expectInt32(output.TimeoutSeconds), } as any; @@ -3146,6 +3156,7 @@ const deserializeAws_json1_1CustomStepDetails = (output: any, context: __SerdeCo const deserializeAws_json1_1DeleteStepDetails = (output: any, context: __SerdeContext): DeleteStepDetails => { return { Name: __expectString(output.Name), + SourceFileLocation: __expectString(output.SourceFileLocation), } as any; }; @@ -3961,6 +3972,7 @@ const deserializeAws_json1_1Tags = (output: any, context: __SerdeContext): Tag[] 
const deserializeAws_json1_1TagStepDetails = (output: any, context: __SerdeContext): TagStepDetails => { return { Name: __expectString(output.Name), + SourceFileLocation: __expectString(output.SourceFileLocation), Tags: output.Tags !== undefined && output.Tags !== null ? deserializeAws_json1_1S3Tags(output.Tags, context) diff --git a/clients/client-translate/src/models/models_0.ts b/clients/client-translate/src/models/models_0.ts index 77bfee37506d..cc232e3dad4a 100644 --- a/clients/client-translate/src/models/models_0.ts +++ b/clients/client-translate/src/models/models_0.ts @@ -560,6 +560,11 @@ export namespace OutputDataConfig { }); } +export enum Formality { + FORMAL = "FORMAL", + INFORMAL = "INFORMAL", +} + export enum Profanity { MASK = "MASK", } @@ -568,6 +573,7 @@ export enum Profanity { *

      Settings that configure the translation output.

      */ export interface TranslationSettings { + Formality?: Formality | string; /** *

      Enable the profanity setting if you want Amazon Translate to mask profane words and * phrases in your translation output.
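To exercise the new Formality enum together with Profanity, a sketch assuming the TranslateText request carries these options in its Settings member (the text and language codes are placeholders).

```ts
import { TranslateClient, TranslateTextCommand } from "@aws-sdk/client-translate";

const translate = new TranslateClient({ region: "us-east-1" });

async function translateFormally(text: string) {
  return translate.send(
    new TranslateTextCommand({
      Text: text,
      SourceLanguageCode: "en",
      TargetLanguageCode: "de",                            // placeholder target language
      Settings: { Formality: "FORMAL", Profanity: "MASK" }, // new Formality option alongside Profanity
    })
  );
}
```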

      diff --git a/clients/client-translate/src/protocols/Aws_json1_1.ts b/clients/client-translate/src/protocols/Aws_json1_1.ts index a33c9edfd774..8e9ef33108a0 100644 --- a/clients/client-translate/src/protocols/Aws_json1_1.ts +++ b/clients/client-translate/src/protocols/Aws_json1_1.ts @@ -1452,6 +1452,7 @@ const serializeAws_json1_1TranslateTextRequest = (input: TranslateTextRequest, c const serializeAws_json1_1TranslationSettings = (input: TranslationSettings, context: __SerdeContext): any => { return { + ...(input.Formality !== undefined && input.Formality !== null && { Formality: input.Formality }), ...(input.Profanity !== undefined && input.Profanity !== null && { Profanity: input.Profanity }), }; }; @@ -2040,6 +2041,7 @@ const deserializeAws_json1_1TranslateTextResponse = (output: any, context: __Ser const deserializeAws_json1_1TranslationSettings = (output: any, context: __SerdeContext): TranslationSettings => { return { + Formality: __expectString(output.Formality), Profanity: __expectString(output.Profanity), } as any; }; diff --git a/clients/client-wafv2/src/WAFV2.ts b/clients/client-wafv2/src/WAFV2.ts index 0a154d67f2ad..910fadba5c6d 100644 --- a/clients/client-wafv2/src/WAFV2.ts +++ b/clients/client-wafv2/src/WAFV2.ts @@ -766,7 +766,8 @@ export class WAFV2 extends WAFV2Client { /** *

      Generates a presigned download URL for the specified release of the mobile SDK.

      - *

      The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

      + *

      The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see + * WAF client application integration in the WAF Developer Guide.

      */ public generateMobileSdkReleaseUrl( args: GenerateMobileSdkReleaseUrlCommandInput, @@ -893,7 +894,8 @@ export class WAFV2 extends WAFV2Client { /** *

      Retrieves information for the specified mobile SDK release, including release notes and tags.

      - *

      The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

      + *

      The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see + * WAF client application integration in the WAF Developer Guide.

      */ public getMobileSdkRelease( args: GetMobileSdkReleaseCommandInput, @@ -1316,7 +1318,8 @@ export class WAFV2 extends WAFV2Client { /** *

      Retrieves a list of the available releases for the mobile SDK and the specified device platform.

      - *

      The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

      + *

      The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see + * WAF client application integration in the WAF Developer Guide.

      */ public listMobileSdkReleases( args: ListMobileSdkReleasesCommandInput, @@ -1515,8 +1518,11 @@ export class WAFV2 extends WAFV2Client { /** *

      Enables the specified LoggingConfiguration, to start logging from a - * web ACL, according to the configuration provided.

      - *

      You can access information about all traffic that WAF inspects using the following + * web ACL, according to the configuration provided.

      + * + *

      You can define one logging destination per web ACL.

      + *
      + *

      You can access information about the traffic that WAF inspects using the following * steps:

      *
        *
      1. @@ -1534,6 +1540,9 @@ export class WAFV2 extends WAFV2Client { * request, WAF creates an additional role or policy that is required to write * logs to the logging destination. For an Amazon CloudWatch Logs log group, WAF creates a resource policy on the log group. * For an Amazon S3 bucket, WAF creates a bucket policy. For an Amazon Kinesis Data Firehose, WAF creates a service-linked role.

        + *

        For additional information about web ACL logging, see + * Logging web ACL traffic information + * in the WAF Developer Guide.

        * *

        This operation completely replaces the mutable specifications that you already have for the logging configuration with the ones that you provide to this call. To modify the logging configuration, retrieve it by calling GetLoggingConfiguration, update the settings as needed, and then provide the complete logging configuration specification to this call.
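A sketch of the put call described above, assuming the request wraps a LoggingConfiguration with ResourceArn and LogDestinationConfigs; both ARNs are placeholders, and there is one logging destination per web ACL.

```ts
import { WAFV2Client, PutLoggingConfigurationCommand } from "@aws-sdk/client-wafv2";

const wafv2 = new WAFV2Client({ region: "us-east-1" });

async function enableWebAclLogging() {
  return wafv2.send(
    new PutLoggingConfigurationCommand({
      LoggingConfiguration: {
        // Placeholder web ACL ARN.
        ResourceArn:
          "arn:aws:wafv2:us-east-1:111122223333:regional/webacl/example-acl/11111111-2222-3333-4444-555555555555",
        LogDestinationConfigs: [
          // Placeholder CloudWatch Logs log group ARN; WAF logging destinations use the "aws-waf-logs-" name prefix.
          "arn:aws:logs:us-east-1:111122223333:log-group:aws-waf-logs-example",
        ],
      },
    })
  );
}
```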

        *
        diff --git a/clients/client-wafv2/src/commands/GenerateMobileSdkReleaseUrlCommand.ts b/clients/client-wafv2/src/commands/GenerateMobileSdkReleaseUrlCommand.ts index 7c849d6bc48b..8dfe3e7bafc1 100644 --- a/clients/client-wafv2/src/commands/GenerateMobileSdkReleaseUrlCommand.ts +++ b/clients/client-wafv2/src/commands/GenerateMobileSdkReleaseUrlCommand.ts @@ -25,7 +25,8 @@ export interface GenerateMobileSdkReleaseUrlCommandOutput /** *

Generates a presigned download URL for the specified release of the mobile SDK.
- * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.
+ * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see
+ * WAF client application integration in the WAF Developer Guide.
 * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript
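A hedged sketch of calling this command with the bare-bones client. The Region, platform, and ReleaseVersion value are illustrative placeholders.

```ts
import { WAFV2Client, GenerateMobileSdkReleaseUrlCommand } from "@aws-sdk/client-wafv2";

const client = new WAFV2Client({ region: "us-east-1" });

export async function getReleaseDownloadUrl(): Promise<string | undefined> {
  // Platform and ReleaseVersion are illustrative; list the available releases first
  // if you do not know the version string.
  const { Url } = await client.send(
    new GenerateMobileSdkReleaseUrlCommand({ Platform: "IOS", ReleaseVersion: "1.0.0" })
  );
  return Url;
}
```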

diff --git a/clients/client-wafv2/src/commands/GetMobileSdkReleaseCommand.ts b/clients/client-wafv2/src/commands/GetMobileSdkReleaseCommand.ts
index a38480ad98de..53e9ad2504b8 100644
--- a/clients/client-wafv2/src/commands/GetMobileSdkReleaseCommand.ts
+++ b/clients/client-wafv2/src/commands/GetMobileSdkReleaseCommand.ts
@@ -23,7 +23,8 @@ export interface GetMobileSdkReleaseCommandOutput extends GetMobileSdkReleaseRes
/**
 * Retrieves information for the specified mobile SDK release, including release notes and tags.
- * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.
+ * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see
+ * WAF client application integration in the WAF Developer Guide.
 * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript

diff --git a/clients/client-wafv2/src/commands/ListMobileSdkReleasesCommand.ts b/clients/client-wafv2/src/commands/ListMobileSdkReleasesCommand.ts
index c125cf5b2e3e..5cc945fcc047 100644
--- a/clients/client-wafv2/src/commands/ListMobileSdkReleasesCommand.ts
+++ b/clients/client-wafv2/src/commands/ListMobileSdkReleasesCommand.ts
@@ -23,7 +23,8 @@ export interface ListMobileSdkReleasesCommandOutput extends ListMobileSdkRelease
/**
 * Retrieves a list of the available releases for the mobile SDK and the specified device platform.
- * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.
+ * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see
+ * WAF client application integration in the WAF Developer Guide.
 * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript
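A minimal sketch of listing the available releases for one device platform; the Region is a placeholder, and the response is logged whole rather than assuming specific member names.

```ts
import { WAFV2Client, ListMobileSdkReleasesCommand } from "@aws-sdk/client-wafv2";

const client = new WAFV2Client({ region: "us-east-1" });

export async function listAndroidReleases(): Promise<void> {
  // Only the device platform is required; for long lists the response also carries
  // a marker for retrieving the next page.
  const response = await client.send(new ListMobileSdkReleasesCommand({ Platform: "ANDROID" }));
  console.log(JSON.stringify(response, null, 2));
}
```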

diff --git a/clients/client-wafv2/src/commands/PutLoggingConfigurationCommand.ts b/clients/client-wafv2/src/commands/PutLoggingConfigurationCommand.ts
index d0a5bf6075a3..07b1d5c77c07 100644
--- a/clients/client-wafv2/src/commands/PutLoggingConfigurationCommand.ts
+++ b/clients/client-wafv2/src/commands/PutLoggingConfigurationCommand.ts
@@ -23,8 +23,11 @@ export interface PutLoggingConfigurationCommandOutput extends PutLoggingConfigur
/**
 * Enables the specified LoggingConfiguration, to start logging from a
- * web ACL, according to the configuration provided.
- * You can access information about all traffic that WAF inspects using the following
+ * web ACL, according to the configuration provided.
+ *
+ * You can define one logging destination per web ACL.
+ *
+ * You can access information about the traffic that WAF inspects using the following
 * steps:
 *
 * 1.
@@ -42,6 +45,9 @@ export interface PutLoggingConfigurationCommandOutput extends PutLoggingConfigur
 * request, WAF creates an additional role or policy that is required to write
 * logs to the logging destination. For an Amazon CloudWatch Logs log group, WAF creates a resource policy on the log group.
 * For an Amazon S3 bucket, WAF creates a bucket policy. For an Amazon Kinesis Data Firehose, WAF creates a service-linked role.
+ * For additional information about web ACL logging, see
+ * Logging web ACL traffic information
+ * in the WAF Developer Guide.
 *
 * This operation completely replaces the mutable specifications that you already have for the logging configuration with the ones that you provide to this call. To modify the logging configuration, retrieve it by calling GetLoggingConfiguration, update the settings as needed, and then provide the complete logging configuration specification to this call.
 *
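The retrieve-modify-replace flow described above can be sketched as follows. The web ACL ARN is supplied by the caller, and clearing LoggingFilter is only an illustrative example of changing a mutable setting.

```ts
import {
  WAFV2Client,
  GetLoggingConfigurationCommand,
  PutLoggingConfigurationCommand,
} from "@aws-sdk/client-wafv2";

const client = new WAFV2Client({ region: "us-east-1" });

export async function updateLogging(webAclArn: string): Promise<void> {
  // 1. Retrieve the current logging configuration.
  const { LoggingConfiguration } = await client.send(
    new GetLoggingConfigurationCommand({ ResourceArn: webAclArn })
  );
  if (!LoggingConfiguration) return;

  // 2. Modify the mutable settings as needed; clearing the filter is just an example.
  const updated = { ...LoggingConfiguration, LoggingFilter: undefined };

  // 3. Send the complete specification back; PutLoggingConfiguration replaces it wholesale.
  await client.send(new PutLoggingConfigurationCommand({ LoggingConfiguration: updated }));
}
```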
          diff --git a/clients/client-wafv2/src/models/models_0.ts b/clients/client-wafv2/src/models/models_0.ts index 6ecbda93c5e3..5dfcf362cd28 100644 --- a/clients/client-wafv2/src/models/models_0.ts +++ b/clients/client-wafv2/src/models/models_0.ts @@ -1370,7 +1370,7 @@ export namespace UsernameField { */ export interface ManagedRuleGroupConfig { /** - *

The login endpoint for your application. For example https://example.com/web/login.
+ * The path of the login endpoint for your application. For example, for the URL https://example.com/web/login, you would provide the path /web/login.
 */
LoginPath?: string;
@@ -2352,8 +2352,8 @@ export interface CreateIPSetRequest {
  IPAddressVersion: IPAddressVersion | string | undefined;
  /**
- * Contains an array of strings that specify one or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.
- * Examples:
+ * Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.
+ * Example address strings:
 *   - To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
@@ -2370,6 +2370,24 @@ export interface CreateIPSetRequest {
 * For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.
+ * Example JSON Addresses specifications:
+ *   - Empty array: "Addresses": []
+ *   - Array with one address: "Addresses": ["192.0.2.44/32"]
+ *   - Array with three addresses: "Addresses": ["192.0.2.44/32", "192.0.2.0/24", "192.0.0.0/16"]
+ *   - INVALID specification: "Addresses": [""] INVALID
 */
Addresses: string[] | undefined;
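A minimal sketch of creating an IP set with one of the documented Addresses forms; the Region, name, scope, and address list are illustrative placeholders.

```ts
import { WAFV2Client, CreateIPSetCommand } from "@aws-sdk/client-wafv2";

const client = new WAFV2Client({ region: "us-east-1" });

export async function createExampleIpSet(): Promise<void> {
  await client.send(
    new CreateIPSetCommand({
      Name: "example-ip-set",      // illustrative name
      Scope: "REGIONAL",           // use "CLOUDFRONT" for CloudFront distributions
      IPAddressVersion: "IPV4",
      // Any of the documented forms is valid here, including an empty array.
      Addresses: ["192.0.2.44/32", "192.0.2.0/24", "192.0.0.0/16"],
    })
  );
}
```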
@@ -3434,7 +3452,7 @@
}
/**

- * Contains one or more IP addresses or blocks of IP addresses specified in Classless
+ * Contains zero or more IP addresses or blocks of IP addresses specified in Classless
 * Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges
 * except for /0. For information about CIDR notation, see the Wikipedia entry Classless
 * Inter-Domain Routing.
@@ -3468,8 +3486,8 @@ export interface IPSet {
  IPAddressVersion: IPAddressVersion | string | undefined;
  /**
- * Contains an array of strings that specify one or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.
- * Examples:
+ * Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.
+ * Example address strings:
 *   - To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
@@ -3486,6 +3504,24 @@ export interface IPSet {
 * For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.
+ * Example JSON Addresses specifications:
+ *   - Empty array: "Addresses": []
+ *   - Array with one address: "Addresses": ["192.0.2.44/32"]
+ *   - Array with three addresses: "Addresses": ["192.0.2.44/32", "192.0.2.0/24", "192.0.0.0/16"]
+ *   - INVALID specification: "Addresses": [""] INVALID
 */
Addresses: string[] | undefined;
}
@@ -3655,7 +3691,28 @@ export namespace LoggingFilter {
 * resource, for logging from WAF. As part of the association, you can specify parts of
 * the standard logging fields to keep out of the logs and you can specify filters so that you
 * log only a subset of the logging records.

- * For information about configuring web ACL logging destinations, see
+ *
+ * You can define one logging destination per web ACL.
+ *
+ * You can access information about the traffic that WAF inspects using the following
+ * steps:
+ *   1. Create your logging destination. You can use an Amazon CloudWatch Logs log group, an Amazon Simple Storage Service (Amazon S3) bucket, or an Amazon Kinesis Data Firehose.
+ *      For information about configuring logging destinations and the permissions that are required for each, see
+ *      Logging web ACL traffic information
+ *      in the WAF Developer Guide.
+ *   2. Associate your logging destination to your web ACL using a
+ *      PutLoggingConfiguration request.
+ *
+ * When you successfully enable logging using a PutLoggingConfiguration
+ * request, WAF creates an additional role or policy that is required to write
+ * logs to the logging destination. For an Amazon CloudWatch Logs log group, WAF creates a resource policy on the log group.
+ * For an Amazon S3 bucket, WAF creates a bucket policy. For an Amazon Kinesis Data Firehose, WAF creates a service-linked role.
+ *
+ * For additional information about web ACL logging, see
 * Logging web ACL traffic information
 * in the WAF Developer Guide.
 */
@@ -3667,8 +3724,11 @@ export interface LoggingConfiguration {
  ResourceArn: string | undefined;
  /**
- * The Amazon Resource Names (ARNs) of the logging destinations that you want to associate
+ * The logging destination configuration that you want to associate
 * with the web ACL.
+ *
+ * You can associate one logging destination to a web ACL.
+ *
 */
LogDestinationConfigs: string[] | undefined;
@@ -3931,7 +3991,8 @@ export namespace GetMobileSdkReleaseRequest {
/**
 *

Information for a release of the mobile SDK, including release notes and tags.
- * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.
+ * The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see
+ * WAF client application integration in the WAF Developer Guide.
 */
export interface MobileSdkRelease {
  /**
@@ -5834,8 +5895,8 @@ export interface UpdateIPSetRequest {
  Description?: string;
  /**

- * Contains an array of strings that specify one or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.
- * Examples:
+ * Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.
+ * Example address strings:
 *   - To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
@@ -5852,6 +5913,24 @@ export interface UpdateIPSetRequest {
 * For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.
+ * Example JSON Addresses specifications:
+ *   - Empty array: "Addresses": []
+ *   - Array with one address: "Addresses": ["192.0.2.44/32"]
+ *   - Array with three addresses: "Addresses": ["192.0.2.44/32", "192.0.2.0/24", "192.0.0.0/16"]
+ *   - INVALID specification: "Addresses": [""] INVALID
 */
Addresses: string[] | undefined;
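Because UpdateIPSet replaces the entire Addresses array and requires the LockToken returned by a prior GetIPSet, a sketch of the round trip looks like the following; the Name, Id, and address values are placeholders.

```ts
import { WAFV2Client, GetIPSetCommand, UpdateIPSetCommand } from "@aws-sdk/client-wafv2";

const client = new WAFV2Client({ region: "us-east-1" });

// Name and Id identify an existing IP set; both values here are placeholders.
export async function replaceAddresses(name: string, id: string): Promise<void> {
  const { IPSet, LockToken } = await client.send(
    new GetIPSetCommand({ Name: name, Id: id, Scope: "REGIONAL" })
  );
  if (!IPSet || !LockToken) return;

  // UpdateIPSet replaces the whole Addresses array, so pass the complete new list
  // along with the LockToken returned by GetIPSet.
  await client.send(
    new UpdateIPSetCommand({
      Name: name,
      Id: id,
      Scope: "REGIONAL",
      Addresses: ["192.0.2.44/32"],
      LockToken,
    })
  );
}
```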
@@ -7147,7 +7226,7 @@ export interface GetWebACLResponse {
  LockToken?: string;
  /**
- * The URL to use in SDK integrations with Amazon Web Services managed rule groups. For example, you can use the integration SDKs with the account takeover prevention managed rule group AWSManagedRulesATPRuleSet. This is only populated if you are using a rule group in your web ACL that integrates with your applications in this way. For more information, see WAF application integration in the WAF Developer Guide.
+ * The URL to use in SDK integrations with Amazon Web Services managed rule groups. For example, you can use the integration SDKs with the account takeover prevention managed rule group AWSManagedRulesATPRuleSet. This is only populated if you are using a rule group in your web ACL that integrates with your applications in this way. For more information, see WAF client application integration in the WAF Developer Guide.

          */ ApplicationIntegrationURL?: string; } diff --git a/codegen/sdk-codegen/aws-models/apprunner.json b/codegen/sdk-codegen/aws-models/apprunner.json index 889e0868b857..3699eebcaa20 100644 --- a/codegen/sdk-codegen/aws-models/apprunner.json +++ b/codegen/sdk-codegen/aws-models/apprunner.json @@ -2618,6 +2618,18 @@ { "value": "NODEJS_12", "name": "NODEJS_12" + }, + { + "value": "NODEJS_14", + "name": "NODEJS_14" + }, + { + "value": "CORRETTO_8", + "name": "CORRETTO_8" + }, + { + "value": "CORRETTO_11", + "name": "CORRETTO_11" } ] } diff --git a/codegen/sdk-codegen/aws-models/customer-profiles.json b/codegen/sdk-codegen/aws-models/customer-profiles.json index d9c7040cb0db..4e559032378b 100644 --- a/codegen/sdk-codegen/aws-models/customer-profiles.json +++ b/codegen/sdk-codegen/aws-models/customer-profiles.json @@ -195,6 +195,147 @@ "smithy.api#documentation": "

          A generic address associated with the customer that is not mailing, shipping, or\n billing.

          " } }, + "com.amazonaws.customerprofiles#AppflowIntegration": { + "type": "structure", + "members": { + "FlowDefinition": { + "target": "com.amazonaws.customerprofiles#FlowDefinition", + "traits": { + "smithy.api#required": {} + } + }, + "Batches": { + "target": "com.amazonaws.customerprofiles#Batches", + "traits": { + "smithy.api#documentation": "

          Batches in workflow of type APPFLOW_INTEGRATION.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Details for workflow of type APPFLOW_INTEGRATION.

          " + } + }, + "com.amazonaws.customerprofiles#AppflowIntegrationWorkflowAttributes": { + "type": "structure", + "members": { + "SourceConnectorType": { + "target": "com.amazonaws.customerprofiles#SourceConnectorType", + "traits": { + "smithy.api#documentation": "

          Specifies the source connector type, such as Salesforce, ServiceNow, and Marketo. Indicates source of ingestion.

          ", + "smithy.api#required": {} + } + }, + "ConnectorProfileName": { + "target": "com.amazonaws.customerprofiles#ConnectorProfileName", + "traits": { + "smithy.api#documentation": "

          The name of the AppFlow connector profile used for ingestion.

          ", + "smithy.api#required": {} + } + }, + "RoleArn": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role. Customer Profiles assumes this role to create resources on your behalf as part of workflow execution.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Structure holding all APPFLOW_INTEGRATION specific workflow attributes.

          " + } + }, + "com.amazonaws.customerprofiles#AppflowIntegrationWorkflowMetrics": { + "type": "structure", + "members": { + "RecordsProcessed": { + "target": "com.amazonaws.customerprofiles#long", + "traits": { + "smithy.api#documentation": "

          Number of records processed in APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "StepsCompleted": { + "target": "com.amazonaws.customerprofiles#long", + "traits": { + "smithy.api#documentation": "

          Total steps completed in APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "TotalSteps": { + "target": "com.amazonaws.customerprofiles#long", + "traits": { + "smithy.api#documentation": "

          Total steps in APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Workflow specific execution metrics for APPFLOW_INTEGRATION workflow.

          " + } + }, + "com.amazonaws.customerprofiles#AppflowIntegrationWorkflowStep": { + "type": "structure", + "members": { + "FlowName": { + "target": "com.amazonaws.customerprofiles#FlowName", + "traits": { + "smithy.api#documentation": "

Name of the flow created during execution of the workflow step. The APPFLOW_INTEGRATION workflow type creates an AppFlow flow during workflow step execution on the customer's behalf.

          ", + "smithy.api#required": {} + } + }, + "Status": { + "target": "com.amazonaws.customerprofiles#Status", + "traits": { + "smithy.api#documentation": "

          Workflow step status for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "ExecutionMessage": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Message indicating execution of workflow step for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "RecordsProcessed": { + "target": "com.amazonaws.customerprofiles#long", + "traits": { + "smithy.api#documentation": "

          Total number of records processed during execution of workflow step for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "BatchRecordsStartTime": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Start datetime of records pulled in batch during execution of workflow step for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "BatchRecordsEndTime": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          End datetime of records pulled in batch during execution of workflow step for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "CreatedAt": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Creation timestamp of workflow step for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + }, + "LastUpdatedAt": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Last updated timestamp for workflow step for APPFLOW_INTEGRATION workflow.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Workflow step details for APPFLOW_INTEGRATION workflow.

          " + } + }, "com.amazonaws.customerprofiles#AttributeSourceIdMap": { "type": "map", "key": { @@ -253,6 +394,34 @@ "smithy.api#httpError": 400 } }, + "com.amazonaws.customerprofiles#Batch": { + "type": "structure", + "members": { + "StartTime": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Start time of batch to split ingestion.

          ", + "smithy.api#required": {} + } + }, + "EndTime": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          End time of batch to split ingestion.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Batch defines the boundaries for ingestion for each step in APPFLOW_INTEGRATION workflow. APPFLOW_INTEGRATION workflow splits ingestion based on these boundaries.

          " + } + }, + "com.amazonaws.customerprofiles#Batches": { + "type": "list", + "member": { + "target": "com.amazonaws.customerprofiles#Batch" + } + }, "com.amazonaws.customerprofiles#BucketName": { "type": "string", "traits": { @@ -507,6 +676,106 @@ } } }, + "com.amazonaws.customerprofiles#CreateIntegrationWorkflow": { + "type": "operation", + "input": { + "target": "com.amazonaws.customerprofiles#CreateIntegrationWorkflowRequest" + }, + "output": { + "target": "com.amazonaws.customerprofiles#CreateIntegrationWorkflowResponse" + }, + "errors": [ + { + "target": "com.amazonaws.customerprofiles#AccessDeniedException" + }, + { + "target": "com.amazonaws.customerprofiles#BadRequestException" + }, + { + "target": "com.amazonaws.customerprofiles#InternalServerException" + }, + { + "target": "com.amazonaws.customerprofiles#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.customerprofiles#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n\tCreates an integration workflow. An integration workflow is an async process which ingests historic data and sets up an integration for ongoing updates. The supported Amazon AppFlow sources are Salesforce, ServiceNow, and Marketo.\n\t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/domains/{DomainName}/workflows/integrations", + "code": 200 + } + } + }, + "com.amazonaws.customerprofiles#CreateIntegrationWorkflowRequest": { + "type": "structure", + "members": { + "DomainName": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The unique name of the domain.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "WorkflowType": { + "target": "com.amazonaws.customerprofiles#WorkflowType", + "traits": { + "smithy.api#documentation": "

          The type of workflow. The only supported value is APPFLOW_INTEGRATION.

          ", + "smithy.api#required": {} + } + }, + "IntegrationConfig": { + "target": "com.amazonaws.customerprofiles#IntegrationConfig", + "traits": { + "smithy.api#documentation": "

          Configuration data for integration workflow.

          ", + "smithy.api#required": {} + } + }, + "ObjectTypeName": { + "target": "com.amazonaws.customerprofiles#typeName", + "traits": { + "smithy.api#documentation": "

          The name of the profile object type.

          ", + "smithy.api#required": {} + } + }, + "RoleArn": { + "target": "com.amazonaws.customerprofiles#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role. Customer Profiles assumes this role to create resources on your behalf as part of workflow execution.

          ", + "smithy.api#required": {} + } + }, + "Tags": { + "target": "com.amazonaws.customerprofiles#TagMap", + "traits": { + "smithy.api#documentation": "

          The tags used to organize, track, or control access for this resource.

          " + } + } + } + }, + "com.amazonaws.customerprofiles#CreateIntegrationWorkflowResponse": { + "type": "structure", + "members": { + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#uuid", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          ", + "smithy.api#required": {} + } + }, + "Message": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          A message indicating create request was received.

          ", + "smithy.api#required": {} + } + } + } + }, "com.amazonaws.customerprofiles#CreateProfile": { "type": "operation", "input": { @@ -717,6 +986,9 @@ { "target": "com.amazonaws.customerprofiles#CreateDomain" }, + { + "target": "com.amazonaws.customerprofiles#CreateIntegrationWorkflow" + }, { "target": "com.amazonaws.customerprofiles#CreateProfile" }, @@ -738,6 +1010,9 @@ { "target": "com.amazonaws.customerprofiles#DeleteProfileObjectType" }, + { + "target": "com.amazonaws.customerprofiles#DeleteWorkflow" + }, { "target": "com.amazonaws.customerprofiles#GetAutoMergingPreview" }, @@ -759,6 +1034,12 @@ { "target": "com.amazonaws.customerprofiles#GetProfileObjectTypeTemplate" }, + { + "target": "com.amazonaws.customerprofiles#GetWorkflow" + }, + { + "target": "com.amazonaws.customerprofiles#GetWorkflowSteps" + }, { "target": "com.amazonaws.customerprofiles#ListAccountIntegrations" }, @@ -783,6 +1064,9 @@ { "target": "com.amazonaws.customerprofiles#ListTagsForResource" }, + { + "target": "com.amazonaws.customerprofiles#ListWorkflows" + }, { "target": "com.amazonaws.customerprofiles#MergeProfiles" }, @@ -1255,6 +1539,65 @@ } } }, + "com.amazonaws.customerprofiles#DeleteWorkflow": { + "type": "operation", + "input": { + "target": "com.amazonaws.customerprofiles#DeleteWorkflowRequest" + }, + "output": { + "target": "com.amazonaws.customerprofiles#DeleteWorkflowResponse" + }, + "errors": [ + { + "target": "com.amazonaws.customerprofiles#AccessDeniedException" + }, + { + "target": "com.amazonaws.customerprofiles#BadRequestException" + }, + { + "target": "com.amazonaws.customerprofiles#InternalServerException" + }, + { + "target": "com.amazonaws.customerprofiles#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.customerprofiles#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes the specified workflow and all its corresponding resources. This is an async process.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/domains/{DomainName}/workflows/{WorkflowId}", + "code": 200 + } + } + }, + "com.amazonaws.customerprofiles#DeleteWorkflowRequest": { + "type": "structure", + "members": { + "DomainName": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The unique name of the domain.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.customerprofiles#DeleteWorkflowResponse": { + "type": "structure", + "members": {} + }, "com.amazonaws.customerprofiles#DestinationField": { "type": "string", "traits": { @@ -2029,6 +2372,12 @@ "traits": { "smithy.api#documentation": "

          A map in which each key is an event type from an external application such as Segment or Shopify, and each value is an ObjectTypeName (template) used to ingest the event.\nIt supports the following event types: SegmentIdentify, ShopifyCreateCustomers, ShopifyUpdateCustomers, ShopifyCreateDraftOrders, \nShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

          " } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          " + } } } }, @@ -2058,7 +2407,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Before calling this API, use CreateDomain or\n UpdateDomain to\n enable identity resolution: set Matching to true.

          \n

          GetMatches returns potentially matching profiles, based on the results of the latest run\n of a machine learning process.

          \n \n

          The process of matching duplicate profiles. If Matching = true, Amazon Connect Customer Profiles starts a weekly\nbatch process called Identity Resolution Job. If you do not specify a date and time for Identity Resolution Job to run, by default it runs every \nSaturday at 12AM UTC to detect duplicate profiles in your domains.

          \n

          After the Identity Resolution Job completes, use the \nGetMatches\nAPI to return and review the results. Or, if you have configured ExportingConfig in the MatchingRequest, you can download the results from\nS3.

          \n
          \n\n

          Amazon Connect uses the following profile attributes to identify matches:

          \n
            \n
          • \n

            PhoneNumber

            \n
          • \n
          • \n

            HomePhoneNumber

            \n
          • \n
          • \n

            BusinessPhoneNumber

            \n
          • \n
          • \n

            MobilePhoneNumber

            \n
          • \n
          • \n

            EmailAddress

            \n
          • \n
          • \n

            PersonalEmailAddress

            \n
          • \n
          • \n

            BusinessEmailAddress

            \n
          • \n
          • \n

            FullName

            \n
          • \n
          • \n

            BusinessName

            \n
          • \n
          \n

          For example, two or more profiles—with spelling mistakes such as John Doe and Jhn Doe, or different casing\n email addresses such as JOHN_DOE@ANYCOMPANY.COM and\n johndoe@anycompany.com, or different phone number\n formats such as 555-010-0000 and +1-555-010-0000—can be detected as belonging to the same customer John Doe and merged into a unified profile.

          ", + "smithy.api#documentation": "

          Before calling this API, use CreateDomain or\n UpdateDomain to\n enable identity resolution: set Matching to true.

          \n

          GetMatches returns potentially matching profiles, based on the results of the latest run\n of a machine learning process.

          \n \n

          The process of matching duplicate profiles. If Matching = true, Amazon Connect Customer Profiles starts a weekly\nbatch process called Identity Resolution Job. If you do not specify a date and time for Identity Resolution Job to run, by default it runs every \nSaturday at 12AM UTC to detect duplicate profiles in your domains.

          \n

          After the Identity Resolution Job completes, use the \nGetMatches\nAPI to return and review the results. Or, if you have configured ExportingConfig in the MatchingRequest, you can download the results from\nS3.

          \n
          \n\n

          Amazon Connect uses the following profile attributes to identify matches:

          \n
            \n
          • \n

            PhoneNumber

            \n
          • \n
          • \n

            HomePhoneNumber

            \n
          • \n
          • \n

            BusinessPhoneNumber

            \n
          • \n
          • \n

            MobilePhoneNumber

            \n
          • \n
          • \n

            EmailAddress

            \n
          • \n
          • \n

            PersonalEmailAddress

            \n
          • \n
          • \n

            BusinessEmailAddress

            \n
          • \n
          • \n

            FullName

            \n
          • \n
          \n

          For example, two or more profiles—with spelling mistakes such as John Doe and Jhn Doe, or different casing\n email addresses such as JOHN_DOE@ANYCOMPANY.COM and\n johndoe@anycompany.com, or different phone number\n formats such as 555-010-0000 and +1-555-010-0000—can be detected as belonging to the same customer John Doe and merged into a unified profile.

          ", "smithy.api#http": { "method": "GET", "uri": "/domains/{DomainName}/matches", @@ -2303,49 +2652,255 @@ } } }, - "com.amazonaws.customerprofiles#GetProfileObjectTypeTemplateResponse": { + "com.amazonaws.customerprofiles#GetProfileObjectTypeTemplateResponse": { + "type": "structure", + "members": { + "TemplateId": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          A unique identifier for the object template.

          " + } + }, + "SourceName": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The name of the source of the object template.

          " + } + }, + "SourceObject": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The source of the object template.

          " + } + }, + "AllowProfileCreation": { + "target": "com.amazonaws.customerprofiles#boolean", + "traits": { + "smithy.api#documentation": "

          Indicates whether a profile should be created when data is received if one doesn’t exist\n for an object of this type. The default is FALSE. If the AllowProfileCreation\n flag is set to FALSE, then the service tries to fetch a standard profile and\n associate this object with the profile. If it is set to TRUE, and if no match\n is found, then the service creates a new standard profile.

          " + } + }, + "SourceLastUpdatedTimestampFormat": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          The format of your sourceLastUpdatedTimestamp that was previously set\n up.

          " + } + }, + "Fields": { + "target": "com.amazonaws.customerprofiles#FieldMap", + "traits": { + "smithy.api#documentation": "

          A map of the name and ObjectType field.

          " + } + }, + "Keys": { + "target": "com.amazonaws.customerprofiles#KeyMap", + "traits": { + "smithy.api#documentation": "

          A list of unique keys that can be used to map data to the profile.

          " + } + } + } + }, + "com.amazonaws.customerprofiles#GetWorkflow": { + "type": "operation", + "input": { + "target": "com.amazonaws.customerprofiles#GetWorkflowRequest" + }, + "output": { + "target": "com.amazonaws.customerprofiles#GetWorkflowResponse" + }, + "errors": [ + { + "target": "com.amazonaws.customerprofiles#AccessDeniedException" + }, + { + "target": "com.amazonaws.customerprofiles#BadRequestException" + }, + { + "target": "com.amazonaws.customerprofiles#InternalServerException" + }, + { + "target": "com.amazonaws.customerprofiles#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.customerprofiles#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          Get details of specified workflow.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/domains/{DomainName}/workflows/{WorkflowId}", + "code": 200 + } + } + }, + "com.amazonaws.customerprofiles#GetWorkflowRequest": { + "type": "structure", + "members": { + "DomainName": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The unique name of the domain.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#uuid", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.customerprofiles#GetWorkflowResponse": { + "type": "structure", + "members": { + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#uuid", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          " + } + }, + "WorkflowType": { + "target": "com.amazonaws.customerprofiles#WorkflowType", + "traits": { + "smithy.api#documentation": "

          The type of workflow. The only supported value is APPFLOW_INTEGRATION.

          " + } + }, + "Status": { + "target": "com.amazonaws.customerprofiles#Status", + "traits": { + "smithy.api#documentation": "

          Status of workflow execution.

          " + } + }, + "ErrorDescription": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Workflow error messages during execution (if any).

          " + } + }, + "StartDate": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          The timestamp that represents when workflow execution started.

          " + } + }, + "LastUpdatedAt": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

The timestamp that represents when the workflow execution was last updated.

          " + } + }, + "Attributes": { + "target": "com.amazonaws.customerprofiles#WorkflowAttributes", + "traits": { + "smithy.api#documentation": "

          Attributes provided for workflow execution.

          " + } + }, + "Metrics": { + "target": "com.amazonaws.customerprofiles#WorkflowMetrics", + "traits": { + "smithy.api#documentation": "

          Workflow specific execution metrics.

          " + } + } + } + }, + "com.amazonaws.customerprofiles#GetWorkflowSteps": { + "type": "operation", + "input": { + "target": "com.amazonaws.customerprofiles#GetWorkflowStepsRequest" + }, + "output": { + "target": "com.amazonaws.customerprofiles#GetWorkflowStepsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.customerprofiles#AccessDeniedException" + }, + { + "target": "com.amazonaws.customerprofiles#BadRequestException" + }, + { + "target": "com.amazonaws.customerprofiles#InternalServerException" + }, + { + "target": "com.amazonaws.customerprofiles#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.customerprofiles#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          Get granular list of steps in workflow.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/domains/{DomainName}/workflows/{WorkflowId}/steps", + "code": 200 + } + } + }, + "com.amazonaws.customerprofiles#GetWorkflowStepsRequest": { + "type": "structure", + "members": { + "DomainName": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The unique name of the domain.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#uuid", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.customerprofiles#token", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          ", + "smithy.api#httpQuery": "next-token" + } + }, + "MaxResults": { + "target": "com.amazonaws.customerprofiles#maxSize100", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per page.

          ", + "smithy.api#httpQuery": "max-results" + } + } + } + }, + "com.amazonaws.customerprofiles#GetWorkflowStepsResponse": { "type": "structure", "members": { - "TemplateId": { - "target": "com.amazonaws.customerprofiles#name", - "traits": { - "smithy.api#documentation": "

          A unique identifier for the object template.

          " - } - }, - "SourceName": { - "target": "com.amazonaws.customerprofiles#name", - "traits": { - "smithy.api#documentation": "

          The name of the source of the object template.

          " - } - }, - "SourceObject": { - "target": "com.amazonaws.customerprofiles#name", - "traits": { - "smithy.api#documentation": "

          The source of the object template.

          " - } - }, - "AllowProfileCreation": { - "target": "com.amazonaws.customerprofiles#boolean", + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#uuid", "traits": { - "smithy.api#documentation": "

          Indicates whether a profile should be created when data is received if one doesn’t exist\n for an object of this type. The default is FALSE. If the AllowProfileCreation\n flag is set to FALSE, then the service tries to fetch a standard profile and\n associate this object with the profile. If it is set to TRUE, and if no match\n is found, then the service creates a new standard profile.

          " + "smithy.api#documentation": "

          Unique identifier for the workflow.

          " } }, - "SourceLastUpdatedTimestampFormat": { - "target": "com.amazonaws.customerprofiles#string1To255", + "WorkflowType": { + "target": "com.amazonaws.customerprofiles#WorkflowType", "traits": { - "smithy.api#documentation": "

          The format of your sourceLastUpdatedTimestamp that was previously set\n up.

          " + "smithy.api#documentation": "

          The type of workflow. The only supported value is APPFLOW_INTEGRATION.

          " } }, - "Fields": { - "target": "com.amazonaws.customerprofiles#FieldMap", + "Items": { + "target": "com.amazonaws.customerprofiles#WorkflowStepsList", "traits": { - "smithy.api#documentation": "

          A map of the name and ObjectType field.

          " + "smithy.api#documentation": "

          List containing workflow step details.

          " } }, - "Keys": { - "target": "com.amazonaws.customerprofiles#KeyMap", + "NextToken": { + "target": "com.amazonaws.customerprofiles#token", "traits": { - "smithy.api#documentation": "

          A list of unique keys that can be used to map data to the profile.

          " + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results.

          " } } } @@ -2461,6 +3016,20 @@ "smithy.api#documentation": "

          Specifies the configuration used when importing incremental records from the\n source.

          " } }, + "com.amazonaws.customerprofiles#IntegrationConfig": { + "type": "structure", + "members": { + "AppflowIntegration": { + "target": "com.amazonaws.customerprofiles#AppflowIntegration", + "traits": { + "smithy.api#documentation": "

          Configuration data for APPFLOW_INTEGRATION workflow type.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Configuration data for integration workflow.

          " + } + }, "com.amazonaws.customerprofiles#IntegrationList": { "type": "list", "member": { @@ -2649,6 +3218,13 @@ "smithy.api#documentation": "

          The maximum number of objects returned per page.

          ", "smithy.api#httpQuery": "max-results" } + }, + "IncludeHidden": { + "target": "com.amazonaws.customerprofiles#optionalBoolean", + "traits": { + "smithy.api#documentation": "

Boolean to indicate whether hidden integrations should be returned. Defaults to False.

          ", + "smithy.api#httpQuery": "include-hidden" + } } } }, @@ -2900,6 +3476,12 @@ "traits": { "smithy.api#documentation": "

          A map in which each key is an event type from an external application such as Segment or Shopify, and each value is an ObjectTypeName (template) used to ingest the event.\nIt supports the following event types: SegmentIdentify, ShopifyCreateCustomers, ShopifyUpdateCustomers, ShopifyCreateDraftOrders, \nShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

          " } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          " + } } }, "traits": { @@ -2964,6 +3546,13 @@ "smithy.api#documentation": "

          The maximum number of objects returned per page.

          ", "smithy.api#httpQuery": "max-results" } + }, + "IncludeHidden": { + "target": "com.amazonaws.customerprofiles#optionalBoolean", + "traits": { + "smithy.api#documentation": "

Boolean to indicate whether hidden integrations should be returned. Defaults to False.

          ", + "smithy.api#httpQuery": "include-hidden" + } } } }, @@ -3374,6 +3963,158 @@ } } }, + "com.amazonaws.customerprofiles#ListWorkflows": { + "type": "operation", + "input": { + "target": "com.amazonaws.customerprofiles#ListWorkflowsRequest" + }, + "output": { + "target": "com.amazonaws.customerprofiles#ListWorkflowsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.customerprofiles#AccessDeniedException" + }, + { + "target": "com.amazonaws.customerprofiles#BadRequestException" + }, + { + "target": "com.amazonaws.customerprofiles#InternalServerException" + }, + { + "target": "com.amazonaws.customerprofiles#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.customerprofiles#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          Query to list all workflows.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/domains/{DomainName}/workflows", + "code": 200 + } + } + }, + "com.amazonaws.customerprofiles#ListWorkflowsItem": { + "type": "structure", + "members": { + "WorkflowType": { + "target": "com.amazonaws.customerprofiles#WorkflowType", + "traits": { + "smithy.api#documentation": "

          The type of workflow. The only supported value is APPFLOW_INTEGRATION.

          ", + "smithy.api#required": {} + } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          ", + "smithy.api#required": {} + } + }, + "Status": { + "target": "com.amazonaws.customerprofiles#Status", + "traits": { + "smithy.api#documentation": "

          Status of workflow execution.

          ", + "smithy.api#required": {} + } + }, + "StatusDescription": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Description for workflow execution status.

          ", + "smithy.api#required": {} + } + }, + "CreatedAt": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Creation timestamp for workflow.

          ", + "smithy.api#required": {} + } + }, + "LastUpdatedAt": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Last updated timestamp for workflow.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A workflow in list of workflows.

          " + } + }, + "com.amazonaws.customerprofiles#ListWorkflowsRequest": { + "type": "structure", + "members": { + "DomainName": { + "target": "com.amazonaws.customerprofiles#name", + "traits": { + "smithy.api#documentation": "

          The unique name of the domain.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "WorkflowType": { + "target": "com.amazonaws.customerprofiles#WorkflowType", + "traits": { + "smithy.api#documentation": "

          The type of workflow. The only supported value is APPFLOW_INTEGRATION.

          " + } + }, + "Status": { + "target": "com.amazonaws.customerprofiles#Status", + "traits": { + "smithy.api#documentation": "

          Status of workflow execution.

          " + } + }, + "QueryStartDate": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Retrieve workflows started after timestamp.

          " + } + }, + "QueryEndDate": { + "target": "com.amazonaws.customerprofiles#timestamp", + "traits": { + "smithy.api#documentation": "

          Retrieve workflows ended after timestamp.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.customerprofiles#token", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          ", + "smithy.api#httpQuery": "next-token" + } + }, + "MaxResults": { + "target": "com.amazonaws.customerprofiles#maxSize100", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per page.

          ", + "smithy.api#httpQuery": "max-results" + } + } + } + }, + "com.amazonaws.customerprofiles#ListWorkflowsResponse": { + "type": "structure", + "members": { + "Items": { + "target": "com.amazonaws.customerprofiles#WorkflowList", + "traits": { + "smithy.api#documentation": "

          List containing workflow details.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.customerprofiles#token", + "traits": { + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results.

          " + } + } + } + }, "com.amazonaws.customerprofiles#MarketoConnectorOperator": { "type": "string", "traits": { @@ -4148,6 +4889,12 @@ "traits": { "smithy.api#documentation": "

          A map in which each key is an event type from an external application such as Segment or Shopify, and each value is an ObjectTypeName (template) used to ingest the event.\nIt supports the following event types: SegmentIdentify, ShopifyCreateCustomers, ShopifyUpdateCustomers, ShopifyCreateDraftOrders, \nShopifyUpdateDraftOrders, ShopifyCreateOrders, and ShopifyUpdatedOrders.

          " } + }, + "WorkflowId": { + "target": "com.amazonaws.customerprofiles#string1To255", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the workflow.

          " + } } } }, @@ -4425,6 +5172,16 @@ "smithy.api#httpError": 404 } }, + "com.amazonaws.customerprofiles#RoleArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 512 + }, + "smithy.api#pattern": "^arn:aws:iam:.*:[0-9]+:" + } + }, "com.amazonaws.customerprofiles#S3ConnectorOperator": { "type": "string", "traits": { @@ -5111,6 +5868,41 @@ "target": "com.amazonaws.customerprofiles#StandardIdentifier" } }, + "com.amazonaws.customerprofiles#Status": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "NOT_STARTED", + "name": "NOT_STARTED" + }, + { + "value": "IN_PROGRESS", + "name": "IN_PROGRESS" + }, + { + "value": "COMPLETE", + "name": "COMPLETE" + }, + { + "value": "FAILED", + "name": "FAILED" + }, + { + "value": "SPLIT", + "name": "SPLIT" + }, + { + "value": "RETRY", + "name": "RETRY" + }, + { + "value": "CANCELLED", + "name": "CANCELLED" + } + ] + } + }, "com.amazonaws.customerprofiles#TagArn": { "type": "string", "traits": { @@ -5841,6 +6633,71 @@ } } }, + "com.amazonaws.customerprofiles#WorkflowAttributes": { + "type": "structure", + "members": { + "AppflowIntegration": { + "target": "com.amazonaws.customerprofiles#AppflowIntegrationWorkflowAttributes", + "traits": { + "smithy.api#documentation": "

          Workflow attributes specific to APPFLOW_INTEGRATION workflow.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Structure to hold workflow attributes.

          " + } + }, + "com.amazonaws.customerprofiles#WorkflowList": { + "type": "list", + "member": { + "target": "com.amazonaws.customerprofiles#ListWorkflowsItem" + } + }, + "com.amazonaws.customerprofiles#WorkflowMetrics": { + "type": "structure", + "members": { + "AppflowIntegration": { + "target": "com.amazonaws.customerprofiles#AppflowIntegrationWorkflowMetrics", + "traits": { + "smithy.api#documentation": "

          Workflow execution metrics for APPFLOW_INTEGRATION workflow.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Generic object containing workflow execution metrics.

          " + } + }, + "com.amazonaws.customerprofiles#WorkflowStepItem": { + "type": "structure", + "members": { + "AppflowIntegration": { + "target": "com.amazonaws.customerprofiles#AppflowIntegrationWorkflowStep", + "traits": { + "smithy.api#documentation": "

          Workflow step information specific to APPFLOW_INTEGRATION workflow.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          List containing steps in workflow.

          " + } + }, + "com.amazonaws.customerprofiles#WorkflowStepsList": { + "type": "list", + "member": { + "target": "com.amazonaws.customerprofiles#WorkflowStepItem" + } + }, + "com.amazonaws.customerprofiles#WorkflowType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "APPFLOW_INTEGRATION", + "name": "APPFLOW_INTEGRATION" + } + ] + } + }, "com.amazonaws.customerprofiles#ZendeskConnectorOperator": { "type": "string", "traits": { diff --git a/codegen/sdk-codegen/aws-models/dynamodb.json b/codegen/sdk-codegen/aws-models/dynamodb.json index 477b536d6485..2b96fc067f10 100644 --- a/codegen/sdk-codegen/aws-models/dynamodb.json +++ b/codegen/sdk-codegen/aws-models/dynamodb.json @@ -3325,7 +3325,7 @@ } ], "traits": { - "smithy.api#documentation": "

This operation allows you to perform reads and singleton writes on data stored in\n DynamoDB, using PartiQL."
+        "smithy.api#documentation": "This operation allows you to perform reads and singleton writes on data stored in\n DynamoDB, using PartiQL.\n For PartiQL reads (SELECT statement), if the total number of processed\n items exceeds the maximum dataset size limit of 1 MB, the read stops and results are\n returned to the user as a LastEvaluatedKey value to continue the read in a\n subsequent operation. If the filter criteria in WHERE clause does not match\n any data, the read will return an empty result set.\n A single SELECT statement response can return up to the maximum number of\n items (if using the Limit parameter) or a maximum of 1 MB of data (and then apply any\n filtering to the results using WHERE clause). If\n LastEvaluatedKey is present in the response, you need to paginate the\n result set."
      }
    },
    "com.amazonaws.dynamodb#ExecuteStatementInput": {
@@ -3358,6 +3358,12 @@
      },
      "ReturnConsumedCapacity": {
        "target": "com.amazonaws.dynamodb#ReturnConsumedCapacity"
+      },
+      "Limit": {
+        "target": "com.amazonaws.dynamodb#PositiveIntegerObject",
+        "traits": {
+          "smithy.api#documentation": "The maximum number of items to evaluate (not necessarily the number of matching\n items). If DynamoDB processes the number of items up to the limit while processing the\n results, it stops the operation and returns the matching values up to that point, along\n with a key in LastEvaluatedKey to apply in a subsequent operation so you\n can pick up where you left off. Also, if the processed dataset size exceeds 1 MB before\n DynamoDB reaches this limit, it stops the operation and returns the matching values up\n to the limit, and a key in LastEvaluatedKey to apply in a subsequent\n operation to continue the operation."
+        }
+      }
    }
  },
@@ -3378,6 +3384,12 @@
      },
      "ConsumedCapacity": {
        "target": "com.amazonaws.dynamodb#ConsumedCapacity"
+      },
+      "LastEvaluatedKey": {
+        "target": "com.amazonaws.dynamodb#Key",
+        "traits": {
+          "smithy.api#documentation": "The primary key of the item where the operation stopped, inclusive of the previous\n result set. Use this value to start a new operation, excluding this value in the new\n request. If LastEvaluatedKey is empty, then the \"last page\" of results has\n been processed and there is no more data to be retrieved. If\n LastEvaluatedKey is not empty, it does not necessarily mean that there\n is more data in the result set. The only way to know when you have reached the end of\n the result set is when LastEvaluatedKey is empty."
+        }
+      }
    }
  },

          Use this property to provide commands or a command script to run when you launch \n\t\t\tyour build instance.

          \n\t\t \n\t\t\t

          The userDataOverride property replaces any commands that Image Builder might have added to ensure\n\t\t\t\tthat Systems Manager is installed on your Linux build instance. If you override the user data,\n\t\t\t\tmake sure that you add commands to install Systems Manager, if it is not pre-installed on your\n\t\t\t\tbase image.

          \n\t\t
          " + "smithy.api#documentation": "

          Use this property to provide commands or a command script to run when you launch \n\t\t\tyour build instance.

          \n\t\t

          The userDataOverride property replaces any commands that Image Builder might have added to ensure\n\t\t\tthat Systems Manager is installed on your Linux build instance. If you override the user data,\n\t\t\tmake sure that you add commands to install Systems Manager, if it is not pre-installed on your\n\t\t\tbase image.

          \n\t\t \n\t\t\t

          The user data is always base 64 encoded. For example, the \n\t\t\t\tfollowing commands are encoded as IyEvYmluL2Jhc2gKbWtkaXIgLXAgL3Zhci9iYi8KdG91Y2ggL3Zhci$:

          #!/bin/bash
          mkdir -p /var/bb/
          touch /var
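Because the user data is always base64 encoded, a caller would typically encode the script before assigning it to userDataOverride. A small Node.js sketch; the commands mirror the (truncated) sample above, and the placeholder file path is illustrative only.

```ts
import { Buffer } from "node:buffer";

// Build the script and base64-encode it for the userDataOverride property.
const script = [
  "#!/bin/bash",
  "mkdir -p /var/bb/",
  "touch /var/bb/placeholder", // illustrative; the documented sample is truncated here
].join("\n");

const userDataOverride = Buffer.from(script, "utf8").toString("base64");
console.log(userDataOverride);
```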
          " } } }, @@ -386,7 +386,7 @@ "data": { "target": "com.amazonaws.imagebuilder#ComponentData", "traits": { - "smithy.api#documentation": "

          The data of the component.

          " + "smithy.api#documentation": "

          Component data contains the YAML document content for the component.

          " } }, "kmsKeyId": { @@ -1097,7 +1097,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a new component that can be used to build, validate, test, and assess your\n\t\t\timage.

          ", + "smithy.api#documentation": "

          Creates a new component that can be used to build, validate, test, and assess your\n\t\t\timage. The component is based on a YAML document that you specify using exactly one \n\t\t\tof the following methods:

          • Inline, using the data property in the request body.
          • A URL that points to a YAML document file stored in Amazon S3, using the uri property in the request body.
          ", "smithy.api#http": { "method": "PUT", "uri": "/CreateComponent", @@ -1150,13 +1150,13 @@ "data": { "target": "com.amazonaws.imagebuilder#InlineComponentData", "traits": { - "smithy.api#documentation": "

          The data of the component. Used to specify the data inline. Either data or\n\t\t\turi can be used to specify the data within the component.

          " + "smithy.api#documentation": "

          Component data contains inline YAML document content for the component. \n\t\t\tAlternatively, you can specify the uri of a YAML document file stored in \n\t\t\tAmazon S3. However, you cannot specify both properties.

          " } }, "uri": { "target": "com.amazonaws.imagebuilder#Uri", "traits": { - "smithy.api#documentation": "

          The uri of the component. Must be an Amazon S3 URL and the requester must have permission to\n\t\t\taccess the Amazon S3 bucket. If you use Amazon S3, you can specify component content up to your service\n\t\t\tquota. Either data or uri can be used to specify the data within the\n\t\t\tcomponent.

          " + "smithy.api#documentation": "

          The uri of a YAML component document file. This must be an S3 URL \n\t\t\t(s3://bucket/key), and the requester must have permission to access \n\t\t\tthe S3 bucket it points to. If you use Amazon S3, you can specify component content \n\t\t\tup to your service quota.

          \n\t\t

          Alternatively, you can specify the YAML document inline, using the component \n\t\t\tdata property. You cannot specify both properties.
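A hedged sketch of CreateComponent with the inline data property from @aws-sdk/client-imagebuilder; the component name, version, and YAML body are hypothetical. Passing uri with an s3:// URL instead of data (never both) is the alternative described above.

```ts
import { ImagebuilderClient, CreateComponentCommand } from "@aws-sdk/client-imagebuilder";

const client = new ImagebuilderClient({ region: "us-east-1" });

// Minimal component document supplied inline via `data`.
const componentYaml = `name: hello-world
description: Minimal example component
schemaVersion: 1.0
phases:
  - name: build
    steps:
      - name: SayHello
        action: ExecuteBash
        inputs:
          commands:
            - echo "Hello from Image Builder"
`;

async function createInlineComponent(): Promise<void> {
  await client.send(
    new CreateComponentCommand({
      name: "hello-world",     // hypothetical
      semanticVersion: "1.0.0",
      platform: "Linux",
      data: componentYaml,     // or: uri: "s3://my-bucket/components/hello-world.yaml"
    })
  );
}
```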

          " } }, "kmsKeyId": { @@ -2642,6 +2642,12 @@ "traits": { "smithy.api#documentation": "

          Configure export settings to deliver disk images created from your image build, \n\t\t\tusing a file format that is compatible with your VMs in that Region.

          " } + }, + "fastLaunchConfigurations": { + "target": "com.amazonaws.imagebuilder#FastLaunchConfigurationList", + "traits": { + "smithy.api#documentation": "

          The Windows faster-launching configurations to use for AMI distribution.

          " + } } }, "traits": { @@ -2919,6 +2925,97 @@ "com.amazonaws.imagebuilder#ErrorMessage": { "type": "string" }, + "com.amazonaws.imagebuilder#FastLaunchConfiguration": { + "type": "structure", + "members": { + "enabled": { + "target": "com.amazonaws.imagebuilder#Boolean", + "traits": { + "smithy.api#documentation": "

          A Boolean that represents the current state of faster launching for the \n\t\t\tWindows AMI. Set to true to start using Windows faster launching, or \n\t\t\tfalse to stop using it.

          ", + "smithy.api#required": {} + } + }, + "snapshotConfiguration": { + "target": "com.amazonaws.imagebuilder#FastLaunchSnapshotConfiguration", + "traits": { + "smithy.api#documentation": "

          Configuration settings for managing the number of snapshots that are \n\t\t\tcreated from pre-provisioned instances for the Windows AMI when faster \n\t\t\tlaunching is enabled.

          " + } + }, + "maxParallelLaunches": { + "target": "com.amazonaws.imagebuilder#MaxParallelLaunches", + "traits": { + "smithy.api#documentation": "

          The maximum number of parallel instances that are launched for creating \n\t\t\tresources.

          " + } + }, + "launchTemplate": { + "target": "com.amazonaws.imagebuilder#FastLaunchLaunchTemplateSpecification", + "traits": { + "smithy.api#documentation": "

          The launch template that the fast-launch enabled Windows AMI uses when it \n\t\t\tlaunches Windows instances to create pre-provisioned snapshots.

          " + } + }, + "accountId": { + "target": "com.amazonaws.imagebuilder#AccountId", + "traits": { + "smithy.api#documentation": "

          The owner account ID for the fast-launch enabled Windows AMI.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Define and configure faster launching for output Windows AMIs.

          " + } + }, + "com.amazonaws.imagebuilder#FastLaunchConfigurationList": { + "type": "list", + "member": { + "target": "com.amazonaws.imagebuilder#FastLaunchConfiguration" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1000 + } + } + }, + "com.amazonaws.imagebuilder#FastLaunchLaunchTemplateSpecification": { + "type": "structure", + "members": { + "launchTemplateId": { + "target": "com.amazonaws.imagebuilder#LaunchTemplateId", + "traits": { + "smithy.api#documentation": "

          The ID of the launch template to use for faster launching for a Windows AMI.

          " + } + }, + "launchTemplateName": { + "target": "com.amazonaws.imagebuilder#NonEmptyString", + "traits": { + "smithy.api#documentation": "

          The name of the launch template to use for faster launching for a Windows AMI.

          " + } + }, + "launchTemplateVersion": { + "target": "com.amazonaws.imagebuilder#NonEmptyString", + "traits": { + "smithy.api#documentation": "

          The version of the launch template to use for faster launching for a Windows AMI.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Identifies the launch template that the associated Windows AMI uses for \n\t\t\tlaunching an instance when faster launching is enabled.

          \n\t\t \n\t\t\t

          You can specify either the launchTemplateName or the \n\t\t\t\tlaunchTemplateId, but not both.

          \n\t\t
          " + } + }, + "com.amazonaws.imagebuilder#FastLaunchSnapshotConfiguration": { + "type": "structure", + "members": { + "targetResourceCount": { + "target": "com.amazonaws.imagebuilder#TargetResourceCount", + "traits": { + "smithy.api#documentation": "

          The number of pre-provisioned snapshots to keep on hand for a fast-launch enabled \n\t\t\tWindows AMI.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Configuration settings for creating and managing pre-provisioned snapshots \n\t\t\tfor a fast-launch enabled Windows AMI.
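Assuming the surrounding Distribution shape (a region plus the new fastLaunchConfigurations list), enabling Windows fast launch in a distribution configuration might look like the sketch below; every name and identifier is hypothetical.

```ts
import {
  ImagebuilderClient,
  CreateDistributionConfigurationCommand,
} from "@aws-sdk/client-imagebuilder";

const client = new ImagebuilderClient({ region: "us-east-1" });

async function createFastLaunchDistribution(): Promise<void> {
  await client.send(
    new CreateDistributionConfigurationCommand({
      name: "windows-fast-launch", // hypothetical
      distributions: [
        {
          region: "us-east-1",
          fastLaunchConfigurations: [
            {
              enabled: true,
              maxParallelLaunches: 6,                            // range 1-10000
              snapshotConfiguration: { targetResourceCount: 5 }, // range 1-10000
              launchTemplate: {
                launchTemplateId: "lt-0123456789abcdef0",        // or launchTemplateName, not both
                launchTemplateVersion: "1",
              },
            },
          ],
        },
      ],
    })
  );
}
```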

          " + } + }, "com.amazonaws.imagebuilder#Filter": { "type": "structure", "members": { @@ -6354,6 +6451,16 @@ "smithy.api#documentation": "

          Logging configuration defines where Image Builder uploads your logs.

          " } }, + "com.amazonaws.imagebuilder#MaxParallelLaunches": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 10000 + } + } + }, "com.amazonaws.imagebuilder#NonEmptyString": { "type": "string", "traits": { @@ -7292,6 +7399,16 @@ "smithy.api#documentation": "

          The container repository where the output container image is stored.

          " } }, + "com.amazonaws.imagebuilder#TargetResourceCount": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 10000 + } + } + }, "com.amazonaws.imagebuilder#Timezone": { "type": "string", "traits": { diff --git a/codegen/sdk-codegen/aws-models/lambda.json b/codegen/sdk-codegen/aws-models/lambda.json index 1f336dd3d4e8..5d11042684ca 100644 --- a/codegen/sdk-codegen/aws-models/lambda.json +++ b/codegen/sdk-codegen/aws-models/lambda.json @@ -2951,6 +2951,42 @@ "code": 200 }, "smithy.waiters#waitable": { + "FunctionActiveV2": { + "documentation": "Waits for the function's State to be Active. This waiter uses GetFunction API. This should be used after new function creation.", + "acceptors": [ + { + "state": "success", + "matcher": { + "output": { + "path": "Configuration.State", + "expected": "Active", + "comparator": "stringEquals" + } + } + }, + { + "state": "failure", + "matcher": { + "output": { + "path": "Configuration.State", + "expected": "Failed", + "comparator": "stringEquals" + } + } + }, + { + "state": "retry", + "matcher": { + "output": { + "path": "Configuration.State", + "expected": "Pending", + "comparator": "stringEquals" + } + } + } + ], + "minDelay": 1 + }, "FunctionExists": { "acceptors": [ { @@ -2967,6 +3003,42 @@ } ], "minDelay": 1 + }, + "FunctionUpdatedV2": { + "documentation": "Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunction API. This should be used after function updates.", + "acceptors": [ + { + "state": "success", + "matcher": { + "output": { + "path": "Configuration.LastUpdateStatus", + "expected": "Successful", + "comparator": "stringEquals" + } + } + }, + { + "state": "failure", + "matcher": { + "output": { + "path": "Configuration.LastUpdateStatus", + "expected": "Failed", + "comparator": "stringEquals" + } + } + }, + { + "state": "retry", + "matcher": { + "output": { + "path": "Configuration.LastUpdateStatus", + "expected": "InProgress", + "comparator": "stringEquals" + } + } + } + ], + "minDelay": 1 } } } @@ -3120,7 +3192,7 @@ }, "smithy.waiters#waitable": { "FunctionActive": { - "documentation": "Waits for the function's State to be Active.", + "documentation": "Waits for the function's State to be Active. This waiter uses GetFunctionConfiguration API. This should be used after new function creation.", "acceptors": [ { "state": "success", @@ -3156,7 +3228,7 @@ "minDelay": 5 }, "FunctionUpdated": { - "documentation": "Waits for the function's LastUpdateStatus to be Successful.", + "documentation": "Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunctionConfiguration API. This should be used after function updates.", "acceptors": [ { "state": "success", @@ -6727,6 +6799,10 @@ "value": "dotnetcore3.1", "name": "dotnetcore31" }, + { + "value": "dotnet6", + "name": "dotnet6" + }, { "value": "nodejs4.3-edge", "name": "nodejs43edge" @@ -7688,7 +7764,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Updates a Lambda function's code. If code signing is enabled for the function, the code package must be signed\n by a trusted publisher. For more information, see Configuring code signing.

          \n \n

          The function's code is locked when you publish a version. You can't modify the code of a published version,\n only the unpublished version.

          \n \n

          For a function defined as a container image, Lambda resolves the image tag to an image digest. In Amazon ECR, if\n you update the image tag to a new image, Lambda does not automatically update the function.

          \n
          ", + "smithy.api#documentation": "

          Updates a Lambda function's code. If code signing is enabled for the function, the code package must be signed\n by a trusted publisher. For more information, see Configuring code signing.

          \n \n

          If the function's package type is Image, you must specify the code package in ImageUri as \n the URI of a\n container image \n in the Amazon ECR registry.\n

          \n \n

          If the function's package type is Zip, you must specify the deployment\n package as a .zip file\n archive. Enter the Amazon S3 bucket and key of the code .zip file location.\n You can also provide the function code inline using the ZipFile field.

          \n

          The code in the deployment package must be compatible with the target instruction set\n architecture of the function (x86-64 or arm64).

          \n \n

          The function's code is locked when you publish a version. You can't modify the code of a published version,\n only the unpublished version.

          \n \n

          For a function defined as a container image, Lambda resolves the image tag to an image digest. In Amazon ECR, if\n you update the image tag to a new image, Lambda does not automatically update the function.

          \n
          ", "smithy.api#http": { "method": "PUT", "uri": "/2015-03-31/functions/{FunctionName}/code", @@ -7710,19 +7786,19 @@ "ZipFile": { "target": "com.amazonaws.lambda#Blob", "traits": { - "smithy.api#documentation": "

          The base64-encoded contents of the deployment package. Amazon Web Services SDK and Amazon Web Services CLI clients handle the encoding for\n you.

          " + "smithy.api#documentation": "

          The base64-encoded contents of the deployment package. Amazon Web Services SDK and Amazon Web Services CLI clients \nhandle the encoding for you. Use only with a function defined with a .zip file archive deployment package.

          " } }, "S3Bucket": { "target": "com.amazonaws.lambda#S3Bucket", "traits": { - "smithy.api#documentation": "

          An Amazon S3 bucket in the same Amazon Web Services Region as your function. The bucket can be in a different Amazon Web Services account.

          " + "smithy.api#documentation": "

          An Amazon S3 bucket in the same Amazon Web Services Region as your function. The bucket can be in a different \nAmazon Web Services account. Use only with a function defined with a .zip file archive deployment package.

          " } }, "S3Key": { "target": "com.amazonaws.lambda#S3Key", "traits": { - "smithy.api#documentation": "

          The Amazon S3 key of the deployment package.

          " + "smithy.api#documentation": "

          The Amazon S3 key of the deployment package. Use only with a function defined with a .zip file archive deployment package.

          " } }, "S3ObjectVersion": { @@ -7734,7 +7810,7 @@ "ImageUri": { "target": "com.amazonaws.lambda#String", "traits": { - "smithy.api#documentation": "

          URI of a container image in the Amazon ECR registry.

          " + "smithy.api#documentation": "

          URI of a container image in the Amazon ECR registry. Do not use for a function defined\n with a .zip file archive.
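Tying the UpdateFunctionCode guidance together: a Zip-packaged function takes ZipFile (or an S3 bucket/key), while an Image-packaged function takes only ImageUri. A hedged sketch; the function names and the ECR URI are hypothetical.

```ts
import { LambdaClient, UpdateFunctionCodeCommand } from "@aws-sdk/client-lambda";

const client = new LambdaClient({ region: "us-east-1" });

// Zip package type: supply the archive bytes (or S3Bucket/S3Key), never ImageUri.
async function updateZipFunction(zipBytes: Uint8Array): Promise<void> {
  await client.send(
    new UpdateFunctionCodeCommand({ FunctionName: "my-zip-function", ZipFile: zipBytes })
  );
}

// Image package type: supply only the container image URI in Amazon ECR.
async function updateImageFunction(): Promise<void> {
  await client.send(
    new UpdateFunctionCodeCommand({
      FunctionName: "my-container-function",
      ImageUri: "111122223333.dkr.ecr.us-east-1.amazonaws.com/my-repo:latest",
    })
  );
}
```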

          " } }, "Publish": { diff --git a/codegen/sdk-codegen/aws-models/textract.json b/codegen/sdk-codegen/aws-models/textract.json index 44d7ab403b3f..0b5eab4e1642 100644 --- a/codegen/sdk-codegen/aws-models/textract.json +++ b/codegen/sdk-codegen/aws-models/textract.json @@ -452,6 +452,14 @@ { "value": "SELECTION_ELEMENT", "name": "SELECTION_ELEMENT" + }, + { + "value": "MERGED_CELL", + "name": "MERGED_CELL" + }, + { + "value": "TITLE", + "name": "TITLE" } ] } @@ -687,6 +695,10 @@ { "value": "VALUE", "name": "VALUE" + }, + { + "value": "COLUMN_HEADER", + "name": "COLUMN_HEADER" } ] } @@ -1779,6 +1791,14 @@ { "value": "COMPLEX_FEATURES", "name": "COMPLEX_FEATURES" + }, + { + "value": "MERGED_CELL", + "name": "MERGED_CELL" + }, + { + "value": "TITLE", + "name": "TITLE" } ] } diff --git a/codegen/sdk-codegen/aws-models/transfer.json b/codegen/sdk-codegen/aws-models/transfer.json index eb92edde9747..131f95cca58e 100644 --- a/codegen/sdk-codegen/aws-models/transfer.json +++ b/codegen/sdk-codegen/aws-models/transfer.json @@ -110,13 +110,22 @@ } }, "DestinationFileLocation": { - "target": "com.amazonaws.transfer#InputFileLocation" + "target": "com.amazonaws.transfer#InputFileLocation", + "traits": { + "smithy.api#documentation": "

          Specifies the location for the file being copied. Only applicable for Copy type workflow\n steps. Use ${Transfer:username} in this field to parametrize the destination\n prefix by username.

          " + } }, "OverwriteExisting": { "target": "com.amazonaws.transfer#OverwriteExisting", "traits": { "smithy.api#documentation": "

          A flag that indicates whether or not to overwrite an existing file of the same name.\n The default is FALSE.

          " } + }, + "SourceFileLocation": { + "target": "com.amazonaws.transfer#SourceFileLocation", + "traits": { + "smithy.api#documentation": "

          Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file\n for the workflow.

          • Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value.
          • Enter ${original.file} to use the originally-uploaded file location as input for this step.
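A hedged CreateWorkflow sketch that combines the new SourceFileLocation field with a copy destination parametrized by username; the bucket, tag, and step names are hypothetical.

```ts
import { TransferClient, CreateWorkflowCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

async function createCopyThenTagWorkflow(): Promise<void> {
  await client.send(
    new CreateWorkflowCommand({
      Description: "copy then tag uploads",
      Steps: [
        {
          Type: "COPY",
          CopyStepDetails: {
            Name: "CopyToArchive",
            DestinationFileLocation: {
              S3FileLocation: { Bucket: "my-archive-bucket", Key: "archive/${Transfer:username}/" },
            },
            OverwriteExisting: "FALSE",
            SourceFileLocation: "${original.file}", // start from the uploaded file
          },
        },
        {
          Type: "TAG",
          TagStepDetails: {
            Name: "TagCopy",
            Tags: [{ Key: "scanned", Value: "false" }],
            SourceFileLocation: "${previous.file}", // operate on the previous step's output
          },
        },
      ],
    })
  );
}
```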
          " + } } }, "traits": { @@ -170,7 +179,7 @@ "HomeDirectoryMappings": { "target": "com.amazonaws.transfer#HomeDirectoryMappings", "traits": { - "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n

          The following is an Entry and Target pair example.

          \n

          \n [ { \"Entry\": \"/directory1\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n

          In most cases, you can use this value instead of the session policy to lock down your\n user to the designated home directory (\"chroot\"). To do this, you can set\n Entry to / and set Target to the\n HomeDirectory parameter value.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n \n \n

          If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is\n ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place\n holders for your directory. If using the CLI, use the s3api or efsapi call instead of\n s3 or efs so you can use the put-object operation. For example, you use the\n following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make\n sure that the end of the key name ends in a / for it to be considered a folder.

          \n
          " + "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n

          The following is an Entry and Target pair example.

          \n

          \n [ { \"Entry\": \"/directory1\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n

          In most cases, you can use this value instead of the session policy to lock down your\n user to the designated home directory (\"chroot\"). To do this, you can set\n Entry to / and set Target to the\n HomeDirectory parameter value.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          " } }, "Policy": { @@ -407,7 +416,7 @@ "HomeDirectoryMappings": { "target": "com.amazonaws.transfer#HomeDirectoryMappings", "traits": { - "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n\n

          The following is an Entry and Target pair example.

          \n\n

          \n [ { \"Entry\": \"/directory1\", \"Target\":\n \"/bucket_name/home/mydirectory\" } ]\n

          \n\n

          In most cases, you can use this value instead of the session policy to lock your user\n down to the designated home directory (\"chroot\"). To do this, you can set\n Entry to / and set Target to the HomeDirectory\n parameter value.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n \n \n

          If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is\n ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place\n holders for your directory. If using the CLI, use the s3api or efsapi call instead of\n s3 or efs so you can use the put-object operation. For example, you use the\n following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make\n sure that the end of the key name ends in a / for it to be considered a folder.

          \n
          " + "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n\n

          The following is an Entry and Target pair example.

          \n\n

          \n [ { \"Entry\": \"/directory1\", \"Target\":\n \"/bucket_name/home/mydirectory\" } ]\n

          \n\n

          In most cases, you can use this value instead of the session policy to lock your user\n down to the designated home directory (\"chroot\"). To do this, you can set\n Entry to / and set Target to the HomeDirectory\n parameter value.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          " } }, "Policy": { @@ -570,6 +579,12 @@ "traits": { "smithy.api#documentation": "

          Timeout, in seconds, for the step.

          " } + }, + "SourceFileLocation": { + "target": "com.amazonaws.transfer#SourceFileLocation", + "traits": { + "smithy.api#documentation": "

          Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file\n for the workflow.

          • Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value.
          • Enter ${original.file} to use the originally-uploaded file location as input for this step.
          " + } } }, "traits": { @@ -754,6 +769,12 @@ "traits": { "smithy.api#documentation": "

          The name of the step, used as an identifier.

          " } + }, + "SourceFileLocation": { + "target": "com.amazonaws.transfer#SourceFileLocation", + "traits": { + "smithy.api#documentation": "

          Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file\n for the workflow.

          • Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value.
          • Enter ${original.file} to use the originally-uploaded file location as input for this step.
          " + } } }, "traits": { @@ -1618,9 +1639,9 @@ "traits": { "smithy.api#length": { "min": 1, - "max": 100 + "max": 65536 }, - "smithy.api#pattern": "^(\\/|(\\/(?!\\.)+[^$#<>;`|&?{}^*/\\n]+){1,4})$" + "smithy.api#pattern": "^[^\\x00]+$" } }, "com.amazonaws.transfer#EndpointDetails": { @@ -1882,7 +1903,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Represents an object that contains entries and targets for\n HomeDirectoryMappings.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n \n \n

          If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is\n ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place\n holders for your directory. If using the CLI, use the s3api or efsapi call instead of\n s3 or efs so you can use the put-object operation. For example, you use the\n following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make\n sure that the end of the key name ends in a / for it to be considered a folder.

          \n
          " + "smithy.api#documentation": "

          Represents an object that contains entries and targets for\n HomeDirectoryMappings.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          " } }, "com.amazonaws.transfer#HomeDirectoryMappings": { @@ -3526,6 +3547,16 @@ "smithy.api#pattern": "^[\\w-]*$" } }, + "com.amazonaws.transfer#SourceFileLocation": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "^\\$\\{(\\w+.)+\\w+\\}$" + } + }, "com.amazonaws.transfer#SourceIp": { "type": "string", "traits": { @@ -3831,6 +3862,12 @@ "traits": { "smithy.api#documentation": "

          Array that contains from 1 to 10 key/value pairs.

          " } + }, + "SourceFileLocation": { + "target": "com.amazonaws.transfer#SourceFileLocation", + "traits": { + "smithy.api#documentation": "

          Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file\n for the workflow.

          • Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value.
          • Enter ${original.file} to use the originally-uploaded file location as input for this step.
          " + } } }, "traits": { @@ -3940,7 +3977,7 @@ "Message": { "target": "com.amazonaws.transfer#Message", "traits": { - "smithy.api#documentation": "

          A message that indicates whether the test was successful or not.

          " + "smithy.api#documentation": "

          A message that indicates whether the test was successful or not.

          \n \n

          If an empty string is returned, the most likely cause is that the authentication failed due to an incorrect username or password.

          \n
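A brief sketch of checking Message for the empty-string case described above; the identifiers and credentials are placeholders.

```ts
import { TransferClient, TestIdentityProviderCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

async function checkLogin(): Promise<void> {
  const res = await client.send(
    new TestIdentityProviderCommand({
      ServerId: "s-1234567890abcdef0", // hypothetical
      UserName: "alice",
      UserPassword: "example-password",
    })
  );
  if (!res.Message) {
    // An empty message most likely means an incorrect username or password.
    console.warn("Authentication appears to have failed");
  }
}
```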
          " } }, "Url": { @@ -4193,7 +4230,7 @@ "HomeDirectoryMappings": { "target": "com.amazonaws.transfer#HomeDirectoryMappings", "traits": { - "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n

          The following is an Entry and Target pair example.

          \n

          \n [ { \"Entry\": \"/directory1\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n

          In most cases, you can use this value instead of the session policy to lock down your\n user to the designated home directory (\"chroot\"). To do this, you can set\n Entry to / and set Target to the\n HomeDirectory parameter value.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n \n \n

          If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is\n ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place\n holders for your directory. If using the CLI, use the s3api or efsapi call instead of\n s3 or efs so you can use the put-object operation. For example, you use the\n following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make\n sure that the end of the key name ends in a / for it to be considered a folder.

          \n
          " + "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n

          The following is an Entry and Target pair example.

          \n

          \n [ { \"Entry\": \"/directory1\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n

          In most cases, you can use this value instead of the session policy to lock down your\n user to the designated home directory (\"chroot\"). To do this, you can set\n Entry to / and set Target to the\n HomeDirectory parameter value.

          \n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          " } }, "Policy": { @@ -4421,7 +4458,7 @@ "HomeDirectoryMappings": { "target": "com.amazonaws.transfer#HomeDirectoryMappings", "traits": { - "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n\n

          The following is an Entry and Target pair example.

          \n

          \n [ { \"Entry\": \"/directory1\", \"Target\":\n \"/bucket_name/home/mydirectory\" } ]\n

          \n\n

          In most cases, you can use this value instead of the session policy to lock down your\n user to the designated home directory (\"chroot\"). To do this, you can set\n Entry to '/' and set Target to the HomeDirectory\n parameter value.

          \n\n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          \n \n \n

          If the target of a logical directory entry does not exist in Amazon S3 or EFS, the entry is\n ignored. As a workaround, you can use the Amazon S3 API or EFS API to create 0 byte objects as place\n holders for your directory. If using the CLI, use the s3api or efsapi call instead of\n s3 or efs so you can use the put-object operation. For example, you use the\n following: aws s3api put-object --bucket bucketname --key path/to/folder/. Make\n sure that the end of the key name ends in a / for it to be considered a folder.

          \n
          " + "smithy.api#documentation": "

          Logical directory mappings that specify what Amazon S3 or Amazon EFS paths and keys should\n be visible to your user and how you want to make them visible. You must specify the\n Entry and Target pair, where Entry shows how the path\n is made visible and Target is the actual Amazon S3 or Amazon EFS path. If you\n only specify a target, it is displayed as is. You also must ensure that your Amazon Web Services Identity\n and Access Management (IAM) role provides access to paths in Target. This value\n can only be set when HomeDirectoryType is set to\n LOGICAL.

          \n\n

          The following is an Entry and Target pair example.

          \n

          \n [ { \"Entry\": \"/directory1\", \"Target\":\n \"/bucket_name/home/mydirectory\" } ]\n

          \n\n

          In most cases, you can use this value instead of the session policy to lock down your\n user to the designated home directory (\"chroot\"). To do this, you can set\n Entry to '/' and set Target to the HomeDirectory\n parameter value.

          \n\n

          The following is an Entry and Target pair example for chroot.

          \n

          \n [ { \"Entry:\": \"/\", \"Target\": \"/bucket_name/home/mydirectory\" } ]\n

          " } }, "Policy": { diff --git a/codegen/sdk-codegen/aws-models/translate.json b/codegen/sdk-codegen/aws-models/translate.json index 9f4730e20bd4..3672b840a36f 100644 --- a/codegen/sdk-codegen/aws-models/translate.json +++ b/codegen/sdk-codegen/aws-models/translate.json @@ -482,6 +482,21 @@ ] } }, + "com.amazonaws.translate#Formality": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "FORMAL", + "name": "FORMAL" + }, + { + "value": "INFORMAL", + "name": "INFORMAL" + } + ] + } + }, "com.amazonaws.translate#GetParallelData": { "type": "operation", "input": { @@ -2110,6 +2125,9 @@ "com.amazonaws.translate#TranslationSettings": { "type": "structure", "members": { + "Formality": { + "target": "com.amazonaws.translate#Formality" + }, "Profanity": { "target": "com.amazonaws.translate#Profanity", "traits": { diff --git a/codegen/sdk-codegen/aws-models/wafv2.json b/codegen/sdk-codegen/aws-models/wafv2.json index 3402b961c775..daba23728bef 100644 --- a/codegen/sdk-codegen/aws-models/wafv2.json +++ b/codegen/sdk-codegen/aws-models/wafv2.json @@ -1718,7 +1718,7 @@ "Addresses": { "target": "com.amazonaws.wafv2#IPAddresses", "traits": { - "smithy.api#documentation": "

          Contains an array of strings that specify one or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.

          \n

          Examples:

          \n
          • To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
          • To configure WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify 192.0.2.0/24.
          • To configure WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify 1111:0000:0000:0000:0000:0000:0000:0111/128.
          • To configure WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify 1111:0000:0000:0000:0000:0000:0000:0000/64.
          \n

          For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.

          ", + "smithy.api#documentation": "

          Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.

          \n

          Example address strings:

          \n
          • To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
          • To configure WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify 192.0.2.0/24.
          • To configure WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify 1111:0000:0000:0000:0000:0000:0000:0111/128.
          • To configure WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify 1111:0000:0000:0000:0000:0000:0000:0000/64.
          \n

          For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.

          \n

          Example JSON Addresses specifications:

          \n
          • Empty array: \"Addresses\": []
          • Array with one address: \"Addresses\": [\"192.0.2.44/32\"]
          • Array with three addresses: \"Addresses\": [\"192.0.2.44/32\", \"192.0.2.0/24\", \"192.0.0.0/16\"]
          • INVALID specification: \"Addresses\": [\"\"] INVALID
          ", "smithy.api#required": {} } }, @@ -3193,7 +3193,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Generates a presigned download URL for the specified release of the mobile SDK.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

          " + "smithy.api#documentation": "

          Generates a presigned download URL for the specified release of the mobile SDK.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see \nWAF client application integration in the WAF Developer Guide.

          " } }, "com.amazonaws.wafv2#GenerateMobileSdkReleaseUrlRequest": { @@ -3456,7 +3456,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Retrieves information for the specified mobile SDK release, including release notes and tags.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

          " + "smithy.api#documentation": "

          Retrieves information for the specified mobile SDK release, including release notes and tags.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see \nWAF client application integration in the WAF Developer Guide.

          " } }, "com.amazonaws.wafv2#GetMobileSdkReleaseRequest": { @@ -3966,7 +3966,7 @@ "ApplicationIntegrationURL": { "target": "com.amazonaws.wafv2#OutputUrl", "traits": { - "smithy.api#documentation": "

          The URL to use in SDK integrations with Amazon Web Services managed rule groups. For example, you can use the integration SDKs with the account takeover prevention managed rule group AWSManagedRulesATPRuleSet. This is only populated if you are using a rule group in your web ACL that integrates with your applications in this way. For more information, see WAF application integration in the WAF Developer Guide.

          " + "smithy.api#documentation": "

          The URL to use in SDK integrations with Amazon Web Services managed rule groups. For example, you can use the integration SDKs with the account takeover prevention managed rule group AWSManagedRulesATPRuleSet. This is only populated if you are using a rule group in your web ACL that integrates with your applications in this way. For more information, see WAF client application integration in the WAF Developer Guide.

          " } } } @@ -4124,13 +4124,13 @@ "Addresses": { "target": "com.amazonaws.wafv2#IPAddresses", "traits": { - "smithy.api#documentation": "

          Contains an array of strings that specify one or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.

          \n

          Examples:

          \n
          • To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
          • To configure WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify 192.0.2.0/24.
          • To configure WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify 1111:0000:0000:0000:0000:0000:0000:0111/128.
          • To configure WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify 1111:0000:0000:0000:0000:0000:0000:0000/64.
          \n

          For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.

          ", + "smithy.api#documentation": "

          Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.

          \n

          Example address strings:

          \n
          • To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.
          • To configure WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify 192.0.2.0/24.
          • To configure WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify 1111:0000:0000:0000:0000:0000:0000:0111/128.
          • To configure WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify 1111:0000:0000:0000:0000:0000:0000:0000/64.
          \n

          For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.

          \n

          Example JSON Addresses specifications:

          \n
          • Empty array: \"Addresses\": []
          • Array with one address: \"Addresses\": [\"192.0.2.44/32\"]
          • Array with three addresses: \"Addresses\": [\"192.0.2.44/32\", \"192.0.2.0/24\", \"192.0.0.0/16\"]
          • INVALID specification: \"Addresses\": [\"\"] INVALID
          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Contains one or more IP addresses or blocks of IP addresses specified in Classless\n Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges\n except for /0. For information about CIDR notation, see the Wikipedia entry Classless\n Inter-Domain Routing.

          \n

          WAF assigns an ARN to each IPSet that you create. To use an IP set in a\n rule, you provide the ARN to the Rule statement IPSetReferenceStatement.

          " + "smithy.api#documentation": "

          Contains zero or more IP addresses or blocks of IP addresses specified in Classless\n Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges\n except for /0. For information about CIDR notation, see the Wikipedia entry Classless\n Inter-Domain Routing.

          \n

          WAF assigns an ARN to each IPSet that you create. To use an IP set in a\n rule, you provide the ARN to the Rule statement IPSetReferenceStatement.
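Because an IP set can now hold zero addresses, clearing a set is a plain update. A hedged sketch with @aws-sdk/client-wafv2; the name, ID, and scope are hypothetical, and LockToken comes from a prior Get call as usual.

```ts
import { WAFV2Client, GetIPSetCommand, UpdateIPSetCommand } from "@aws-sdk/client-wafv2";

const client = new WAFV2Client({ region: "us-east-1" });

async function clearIpSet(name: string, id: string): Promise<void> {
  const current = await client.send(
    new GetIPSetCommand({ Name: name, Scope: "REGIONAL", Id: id })
  );
  await client.send(
    new UpdateIPSetCommand({
      Name: name,
      Scope: "REGIONAL",
      Id: id,
      Addresses: [],                 // an empty array is now a valid specification
      LockToken: current.LockToken!, // optimistic-locking token from the Get call
    })
  );
}
```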

          " } }, "com.amazonaws.wafv2#IPSetForwardedIPConfig": { @@ -4809,7 +4809,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Retrieves a list of the available releases for the mobile SDK and the specified device platform.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

          " + "smithy.api#documentation": "

          Retrieves a list of the available releases for the mobile SDK and the specified device platform.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see \nWAF client application integration in the WAF Developer Guide.

          " } }, "com.amazonaws.wafv2#ListMobileSdkReleasesRequest": { @@ -5208,7 +5208,7 @@ "LogDestinationConfigs": { "target": "com.amazonaws.wafv2#LogDestinationConfigs", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Names (ARNs) of the logging destinations that you want to associate\n with the web ACL.

          ", + "smithy.api#documentation": "

          The logging destination configuration that you want to associate\n with the web ACL.

          \n \n

          You can associate one logging destination to a web ACL.

          \n
          ", "smithy.api#required": {} } }, @@ -5232,7 +5232,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Defines an association between logging destinations and a web ACL\n resource, for logging from WAF. As part of the association, you can specify parts of\n the standard logging fields to keep out of the logs and you can specify filters so that you\n log only a subset of the logging records.

          \n

          For information about configuring web ACL logging destinations, see \n Logging web ACL traffic information \n in the WAF Developer Guide.

          " + "smithy.api#documentation": "

          Defines an association between logging destinations and a web ACL\n resource, for logging from WAF. As part of the association, you can specify parts of\n the standard logging fields to keep out of the logs and you can specify filters so that you\n log only a subset of the logging records.

          \n \n

          You can define one logging destination per web ACL.

          \n
          \n

          You can access information about the traffic that WAF inspects using the following\n steps:

          \n
          1. Create your logging destination. You can use an Amazon CloudWatch Logs log group, an Amazon Simple Storage Service (Amazon S3) bucket, or an Amazon Kinesis Data Firehose. For information about configuring logging destinations and the permissions that are required for each, see Logging web ACL traffic information in the WAF Developer Guide.
          2. Associate your logging destination to your web ACL using a PutLoggingConfiguration request.
          \n

          When you successfully enable logging using a PutLoggingConfiguration\n request, WAF creates an additional role or policy that is required to write\n logs to the logging destination. For an Amazon CloudWatch Logs log group, WAF creates a resource policy on the log group.\n For an Amazon S3 bucket, WAF creates a bucket policy. For an Amazon Kinesis Data Firehose, WAF creates a service-linked role.

          \n

          For additional information about web ACL logging, see \n Logging web ACL traffic information \n in the WAF Developer Guide.

          " } }, "com.amazonaws.wafv2#LoggingConfigurations": { @@ -5279,7 +5279,7 @@ "LoginPath": { "target": "com.amazonaws.wafv2#LoginPathString", "traits": { - "smithy.api#documentation": "

          The login endpoint for your application. For example https://example.com/web/login.

          " + "smithy.api#documentation": "

          The path of the login endpoint for your application. For example, for the URL https://example.com/web/login, you would provide the path /web/login.

          " } }, "PayloadType": { @@ -5613,7 +5613,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Information for a release of the mobile SDK, including release notes and tags.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF.

          " + "smithy.api#documentation": "

          Information for a release of the mobile SDK, including release notes and tags.

          \n

          The mobile SDK is not generally available. Customers who have access to the mobile SDK can use it to establish and manage Security Token Service (STS) security tokens for use in HTTP(S) requests from a mobile device to WAF. For more information, see \nWAF client application integration in the WAF Developer Guide.

          " } }, "com.amazonaws.wafv2#NextMarker": { @@ -6076,7 +6076,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Enables the specified LoggingConfiguration, to start logging from a\n web ACL, according to the configuration provided.

          \n

          You can access information about all traffic that WAF inspects using the following\n steps:

          \n
          1. Create your logging destination. You can use an Amazon CloudWatch Logs log group, an Amazon Simple Storage Service (Amazon S3) bucket, or an Amazon Kinesis Data Firehose. For information about configuring logging destinations and the permissions that are required for each, see Logging web ACL traffic information in the WAF Developer Guide.
          2. Associate your logging destination to your web ACL using a PutLoggingConfiguration request.
          \n

          When you successfully enable logging using a PutLoggingConfiguration\n request, WAF creates an additional role or policy that is required to write\n logs to the logging destination. For an Amazon CloudWatch Logs log group, WAF creates a resource policy on the log group.\n For an Amazon S3 bucket, WAF creates a bucket policy. For an Amazon Kinesis Data Firehose, WAF creates a service-linked role.

          \n \n

          This operation completely replaces the mutable specifications that you already have for the logging configuration with the ones that you provide to this call. To modify the logging configuration, retrieve it by calling GetLoggingConfiguration, update the settings as needed, and then provide the complete logging configuration specification to this call.

          \n
          " + "smithy.api#documentation": "

          Enables the specified LoggingConfiguration, to start logging from a\n web ACL, according to the configuration provided.

          \n \n

          You can define one logging destination per web ACL.

          \n
          \n

          You can access information about the traffic that WAF inspects using the following\n steps:

          \n
          1. Create your logging destination. You can use an Amazon CloudWatch Logs log group, an Amazon Simple Storage Service (Amazon S3) bucket, or an Amazon Kinesis Data Firehose. For information about configuring logging destinations and the permissions that are required for each, see Logging web ACL traffic information in the WAF Developer Guide.
          2. Associate your logging destination to your web ACL using a PutLoggingConfiguration request.
          \n

          When you successfully enable logging using a PutLoggingConfiguration\n request, WAF creates an additional role or policy that is required to write\n logs to the logging destination. For an Amazon CloudWatch Logs log group, WAF creates a resource policy on the log group.\n For an Amazon S3 bucket, WAF creates a bucket policy. For an Amazon Kinesis Data Firehose, WAF creates a service-linked role.

          \n

          For additional information about web ACL logging, see \n Logging web ACL traffic information \n in the WAF Developer Guide.

          \n \n

          This operation completely replaces the mutable specifications that you already have for the logging configuration with the ones that you provide to this call. To modify the logging configuration, retrieve it by calling GetLoggingConfiguration, update the settings as needed, and then provide the complete logging configuration specification to this call.

          \n
          " } }, "com.amazonaws.wafv2#PutLoggingConfigurationRequest": { @@ -7635,7 +7635,7 @@ "Addresses": { "target": "com.amazonaws.wafv2#IPAddresses", "traits": { - "smithy.api#documentation": "

          Contains an array of strings that specify one or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.

          \n

          Examples:

          \n
            \n
          • \n

            To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.

            \n
          • \n
          • \n

            To configure WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify \n 192.0.2.0/24.

            \n
          • \n
          • \n

            To configure WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify 1111:0000:0000:0000:0000:0000:0000:0111/128.

            \n
          • \n
          • \n

            To configure WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify 1111:0000:0000:0000:0000:0000:0000:0000/64.

            \n
          • \n
          \n

          For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.

          ", + "smithy.api#documentation": "

          Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for /0.

          \n

          Example address strings:

          \n
            \n
          • \n

            To configure WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify 192.0.2.44/32.

            \n
          • \n
          • \n

            To configure WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify \n 192.0.2.0/24.

            \n
          • \n
          • \n

            To configure WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify 1111:0000:0000:0000:0000:0000:0000:0111/128.

            \n
          • \n
          • \n

            To configure WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify 1111:0000:0000:0000:0000:0000:0000:0000/64.

            \n
          • \n
          \n

          For more information about CIDR notation, see the Wikipedia entry Classless Inter-Domain Routing.

          \n

          Example JSON Addresses specifications:

          \n
            \n
          • \n

            Empty array: \"Addresses\": [] \n

            \n
          • \n
          • \n

            Array with one address: \"Addresses\": [\"192.0.2.44/32\"] \n

            \n
          • \n
          • \n

            Array with three addresses: \"Addresses\": [\"192.0.2.44/32\", \"192.0.2.0/24\", \"192.0.0.0/16\"] \n

            \n
          • \n
          • \n

            INVALID specification: \"Addresses\": [\"\"] INVALID

            \n
          • \n
          ", "smithy.api#required": {} } }, diff --git a/codegen/smithy-aws-typescript-codegen/src/main/resources/software/amazon/smithy/aws/typescript/codegen/endpoints.json b/codegen/smithy-aws-typescript-codegen/src/main/resources/software/amazon/smithy/aws/typescript/codegen/endpoints.json index ceb5e61d0df0..51bd7bd97f8f 100644 --- a/codegen/smithy-aws-typescript-codegen/src/main/resources/software/amazon/smithy/aws/typescript/codegen/endpoints.json +++ b/codegen/smithy-aws-typescript-codegen/src/main/resources/software/amazon/smithy/aws/typescript/codegen/endpoints.json @@ -1056,6 +1056,106 @@ } } }, + "api.tunneling.iot": { + "defaults": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.{region}.{dnsSuffix}", + "tags": ["fips"] + } + ] + }, + "endpoints": { + "ap-east-1": {}, + "ap-northeast-1": {}, + "ap-northeast-2": {}, + "ap-south-1": {}, + "ap-southeast-1": {}, + "ap-southeast-2": {}, + "ca-central-1": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.ca-central-1.amazonaws.com", + "tags": ["fips"] + } + ] + }, + "eu-central-1": {}, + "eu-north-1": {}, + "eu-west-1": {}, + "eu-west-2": {}, + "eu-west-3": {}, + "fips-ca-central-1": { + "credentialScope": { + "region": "ca-central-1" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1": { + "credentialScope": { + "region": "us-east-1" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2": { + "credentialScope": { + "region": "us-east-2" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1": { + "credentialScope": { + "region": "us-west-1" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2": { + "credentialScope": { + "region": "us-west-2" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.us-west-2.amazonaws.com" + }, + "me-south-1": {}, + "sa-east-1": {}, + "us-east-1": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.us-east-1.amazonaws.com", + "tags": ["fips"] + } + ] + }, + "us-east-2": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.us-east-2.amazonaws.com", + "tags": ["fips"] + } + ] + }, + "us-west-1": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.us-west-1.amazonaws.com", + "tags": ["fips"] + } + ] + }, + "us-west-2": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.us-west-2.amazonaws.com", + "tags": ["fips"] + } + ] + } + } + }, "apigateway": { "endpoints": { "af-south-1": {}, @@ -8630,6 +8730,7 @@ }, "mq": { "endpoints": { + "af-south-1": {}, "ap-east-1": {}, "ap-northeast-1": {}, "ap-northeast-2": {}, @@ -14109,6 +14210,12 @@ "cn-northwest-1": {} } }, + "api.tunneling.iot": { + "endpoints": { + "cn-north-1": {}, + "cn-northwest-1": {} + } + }, "apigateway": { "endpoints": { "cn-north-1": {}, @@ -15143,6 +15250,14 @@ } }, "acm": { + "defaults": { + "variants": [ + { + "hostname": "acm.{region}.{dnsSuffix}", + "tags": ["fips"] + } + ] + }, "endpoints": { "us-gov-east-1": { "credentialScope": { @@ -15372,6 +15487,48 @@ } } }, + "api.tunneling.iot": { + "defaults": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.{region}.{dnsSuffix}", + "tags": ["fips"] + } + ] + }, + "endpoints": { + "fips-us-gov-east-1": { + "credentialScope": { + "region": "us-gov-east-1" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1": { + 
"credentialScope": { + "region": "us-gov-west-1" + }, + "deprecated": true, + "hostname": "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com", + "tags": ["fips"] + } + ] + }, + "us-gov-west-1": { + "variants": [ + { + "hostname": "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com", + "tags": ["fips"] + } + ] + } + } + }, "apigateway": { "endpoints": { "us-gov-east-1": {}, @@ -16174,6 +16331,14 @@ } }, "ec2": { + "defaults": { + "variants": [ + { + "hostname": "ec2.{region}.{dnsSuffix}", + "tags": ["fips"] + } + ] + }, "endpoints": { "us-gov-east-1": { "credentialScope": { diff --git a/lib/lib-dynamodb/src/commands/ExecuteStatementCommand.ts b/lib/lib-dynamodb/src/commands/ExecuteStatementCommand.ts index d868acaf13d2..3ae5ce916c1b 100644 --- a/lib/lib-dynamodb/src/commands/ExecuteStatementCommand.ts +++ b/lib/lib-dynamodb/src/commands/ExecuteStatementCommand.ts @@ -14,8 +14,9 @@ export type ExecuteStatementCommandInput = Omit<__ExecuteStatementCommandInput, Parameters?: NativeAttributeValue[]; }; -export type ExecuteStatementCommandOutput = Omit<__ExecuteStatementCommandOutput, "Items"> & { +export type ExecuteStatementCommandOutput = Omit<__ExecuteStatementCommandOutput, "Items" | "LastEvaluatedKey"> & { Items?: { [key: string]: NativeAttributeValue }[]; + LastEvaluatedKey?: { [key: string]: NativeAttributeValue }; }; /** @@ -31,7 +32,7 @@ export class ExecuteStatementCommand extends $Command< DynamoDBDocumentClientResolvedConfig > { private readonly inputKeyNodes = [{ key: "Parameters" }]; - private readonly outputKeyNodes = [{ key: "Items" }]; + private readonly outputKeyNodes = [{ key: "Items" }, { key: "LastEvaluatedKey" }]; constructor(readonly input: ExecuteStatementCommandInput) { super();